Schema Builder
The knex.schema is a getter function, which returns a stateful object containing the query. Therefore be sure to obtain a new instance of the knex.schema for every query. These methods return promises.
Dialect badges: [-SQ] = not supported by SQLite. [~SQ] = emulated in SQLite (multiple statements/workarounds).
Essentials
withSchema
knex.schema.withSchema([schemaName])
Specifies the schema to be used when using the schema-building commands.
knex.schema.withSchema('public').createTable('users', function (table) {
table.increments();
});create table "public"."users" ("id" serial primary key)CREATE TABLE [public].[users] ([id] int identity(1,1) not null primary key)create table `public`.`users` (`id` int unsigned not null auto_increment primary key)create table "public"."users" ("id" integer not null primary key)
DECLARE PK_NAME VARCHAR(200); BEGIN EXECUTE IMMEDIATE ('CREATE SEQUENCE "public"."users_seq"'); SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = 'P' AND cons.constraint_name = cols.constraint_name AND cons.owner = 'public' AND cols.table_name = 'users'; execute immediate ('create or replace trigger "public"."users_autoinc_trg" BEFORE INSERT on "public"."users" for each row declare checking number := 1; begin if (:new."' || PK_NAME || '" is null) then while checking >= 1 loop select "public"."users_seq".nextval into :new."' || PK_NAME || '" from dual; select count("' || PK_NAME || '") into checking from "public"."users" where "' || PK_NAME || '" = :new."' || PK_NAME || '"; end loop; end if; end;'); END;create table "public"."users" ("id" serial primary key)create table "public"."users" ("id" integer identity(1,1) primary key not null)create table `public`.`users` (`id` integer not null primary key autoincrement)createTable
knex.schema.createTable(tableName, callback)
Creates a new table on the database, with a callback function to modify the table's structure, using the schema-building commands.
knex.schema.createTable('users', function (table) {
table.increments();
table.string('name');
table.timestamps();
});create table "users" ("id" serial primary key, "name" varchar(255), "created_at" timestamptz, "updated_at" timestamptz)CREATE TABLE [users] ([id] int identity(1,1) not null primary key, [name] nvarchar(255), [created_at] datetime2, [updated_at] datetime2)create table `users` (`id` int unsigned not null auto_increment primary key, `name` varchar(255), `created_at` datetime, `updated_at` datetime)create table "users" ("id" integer not null primary key, "name" varchar2(255), "created_at" timestamp with local time zone, "updated_at" timestamp with local time zone)
DECLARE PK_NAME VARCHAR(200); BEGIN EXECUTE IMMEDIATE ('CREATE SEQUENCE "users_seq"'); SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = 'P' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = 'users'; execute immediate ('create or replace trigger "users_autoinc_trg" BEFORE INSERT on "users" for each row declare checking number := 1; begin if (:new."' || PK_NAME || '" is null) then while checking >= 1 loop select "users_seq".nextval into :new."' || PK_NAME || '" from dual; select count("' || PK_NAME || '") into checking from "users" where "' || PK_NAME || '" = :new."' || PK_NAME || '"; end loop; end if; end;'); END;create table "users" ("id" serial primary key, "name" varchar(255), "created_at" timestamptz, "updated_at" timestamptz)create table "users" ("id" integer identity(1,1) primary key not null, "name" varchar(255), "created_at" timestamptz, "updated_at" timestamptz)create table `users` (`id` integer not null primary key autoincrement, `name` varchar(255), `created_at` datetime, `updated_at` datetime)createTableLike
knex.schema.createTableLike(tableName, tableNameToCopy, [callback])
Creates a new table on the database based on another table. Copies only the structure — columns, keys and indexes (except on SQL Server, which only copies columns) — and not the data. A callback function can be specified to add columns to the duplicated table.
knex.schema.createTableLike('new_users', 'users');
// "new_users" table contains columns
// of users and two new columns 'age' and 'last_name'.
knex.schema.createTableLike('new_users', 'users', (table) => {
table.integer('age');
table.string('last_name');
});create table "new_users" (like "users" including all)
-- ----
create table "new_users" (like "users" including all, "age" integer, "last_name" varchar(255))SELECT * INTO [new_users] FROM [users] WHERE 0=1
-- ----
SELECT * INTO [new_users] FROM [users] WHERE 0=1
ALTER TABLE [new_users] ADD [age] int, [last_name] nvarchar(255)create table `new_users` like `users`
-- ----
create table `new_users` like `users`
alter table `new_users` add `age` int, add `last_name` varchar(255)create table "new_users" as (select * from "users" where 0=1)
-- ----
create table "new_users" as (select * from "users" where 0=1)
alter table "new_users" add ("age" integer, "last_name" varchar2(255))create table "new_users" (like "users" including all)
-- ----
create table "new_users" (like "users" including all, "age" integer, "last_name" varchar(255))create table "new_users" (like "users")
-- ----
create table "new_users" (like "users")
alter table "new_users" add column "age" integer
alter table "new_users" add column "last_name" varchar(255)create table `new_users` as select * from `users` where 0=1
-- ----
create table `new_users` as select * from `users` where 0=1
alter table `new_users` add column `age` integer
alter table `new_users` add column `last_name` varchar(255)dropTable
knex.schema.dropTable(tableName)
Drops a table, specified by tableName.
knex.schema.dropTable('users');drop table "users"DROP TABLE [users]drop table `users`drop table "users"
begin execute immediate 'drop sequence "users_seq"'; exception when others then if sqlcode != -2289 then raise; end if; end;drop table "users"drop table "users"drop table `users`dropTableIfExists
knex.schema.dropTableIfExists(tableName)
Drops a table conditionally if the table exists, specified by tableName.
knex.schema.dropTableIfExists('users');drop table if exists "users"if object_id('[users]', 'U') is not null DROP TABLE [users]drop table if exists `users`begin execute immediate 'drop table "users"'; exception when others then if sqlcode != -942 then raise; end if; end;
begin execute immediate 'drop sequence "users_seq"'; exception when others then if sqlcode != -2289 then raise; end if; end;drop table if exists "users"drop table if exists "users"drop table if exists `users`renameTable
knex.schema.renameTable(from, to)
Renames a table from a current tableName to another.
knex.schema.renameTable('old_users', 'users');alter table "old_users" rename to "users"exec sp_rename 'old_users', 'users'rename table `old_users` to `users`DECLARE PK_NAME VARCHAR(200); IS_AUTOINC NUMBER := 0; BEGIN EXECUTE IMMEDIATE ('RENAME "old_users" TO "users"'); SELECT COUNT(*) INTO IS_AUTOINC from "USER_TRIGGERS" where trigger_name = 'old_users_autoinc_trg'; IF (IS_AUTOINC > 0) THEN EXECUTE IMMEDIATE ('DROP TRIGGER "old_users_autoinc_trg"'); EXECUTE IMMEDIATE ('RENAME "old_users_seq" TO "users_seq"'); SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = 'P' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = 'users'; EXECUTE IMMEDIATE ('create or replace trigger "users_autoinc_trg" BEFORE INSERT on "users" for each row declare checking number := 1; begin if (:new."' || PK_NAME || '" is null) then while checking >= 1 loop select "users_seq".nextval into :new."' || PK_NAME || '" from dual; select count("' || PK_NAME || '") into checking from "users" where "' || PK_NAME || '" = :new."' || PK_NAME || '"; end loop; end if; end;'); end if;END;alter table "old_users" rename to "users"alter table "old_users" rename to "users"alter table `old_users` rename to `users`hasTable
knex.schema.hasTable(tableName)
Checks for a table's existence by tableName, resolving with a boolean to signal if the table exists.
knex.schema.hasTable('users');select * from information_schema.tables where table_name = 'users' and table_schema = current_schema()SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME = 'users'select * from information_schema.tables where table_name = 'users' and table_schema = database()select TABLE_NAME from USER_TABLES where TABLE_NAME = 'users'select * from information_schema.tables where table_name = 'users' and table_schema = current_schema()select * from information_schema.tables where table_name = 'users' and table_schema = current_schema()select * from sqlite_master where type = 'table' and name = 'users'hasColumn
knex.schema.hasColumn(tableName, columnName)
Checks if a column exists in the current table, resolves the promise with a boolean, true if the column exists, false otherwise.
knex.schema.hasColumn('users', 'email');select * from information_schema.columns where table_name = 'users' and column_name = 'email' and table_schema = current_schema()select object_id from sys.columns where name = 'email' and object_id = object_id('[users]')show columns from `users`select COLUMN_NAME from ALL_TAB_COLUMNS where TABLE_NAME = 'users' and COLUMN_NAME = 'email'select * from information_schema.columns where table_name = 'users' and column_name = 'email' and table_schema = current_schema()select * from information_schema.columns where table_name = 'users' and column_name = 'email' and table_schema = current_schema()PRAGMA table_info(`users`)table
knex.schema.table(tableName, callback)
Chooses a database table, and then modifies the table, using the Schema Building functions inside of the callback.
knex.schema.table('users', function (table) {
table.dropColumn('name');
table.string('first_name');
table.string('last_name');
});alter table "users" add column "first_name" varchar(255), add column "last_name" varchar(255)
alter table "users" drop column "name"ALTER TABLE [users] ADD [first_name] nvarchar(255), [last_name] nvarchar(255)
DECLARE @constraint varchar(100) = (SELECT default_constraints.name
FROM sys.all_columns
INNER JOIN sys.tables
ON all_columns.object_id = tables.object_id
INNER JOIN sys.schemas
ON tables.schema_id = schemas.schema_id
INNER JOIN sys.default_constraints
ON all_columns.default_object_id = default_constraints.object_id
WHERE schemas.name = 'dbo'
AND tables.name = 'users'
AND all_columns.name = 'name')
IF @constraint IS NOT NULL EXEC('ALTER TABLE users DROP CONSTRAINT ' + @constraint)
ALTER TABLE [users] DROP COLUMN [name]alter table `users` add `first_name` varchar(255), add `last_name` varchar(255)
alter table `users` drop `name`alter table "users" add ("first_name" varchar2(255), "last_name" varchar2(255))
alter table "users" drop ("name")alter table "users" add column "first_name" varchar(255), add column "last_name" varchar(255)
alter table "users" drop column "name"alter table "users" add column "first_name" varchar(255)
alter table "users" add column "last_name" varchar(255)
alter table "users" drop column "name"alter table `users` add column `first_name` varchar(255)
alter table `users` add column `last_name` varchar(255)
PRAGMA table_info(`users`)alterTable
knex.schema.alterTable(tableName, callback)
Chooses a database table, and then modifies the table, using the Schema Building functions inside of the callback.
knex.schema.alterTable('users', function (table) {
table.dropColumn('name');
table.string('first_name');
table.string('last_name');
});alter table "users" add column "first_name" varchar(255), add column "last_name" varchar(255)
alter table "users" drop column "name"ALTER TABLE [users] ADD [first_name] nvarchar(255), [last_name] nvarchar(255)
DECLARE @constraint varchar(100) = (SELECT default_constraints.name
FROM sys.all_columns
INNER JOIN sys.tables
ON all_columns.object_id = tables.object_id
INNER JOIN sys.schemas
ON tables.schema_id = schemas.schema_id
INNER JOIN sys.default_constraints
ON all_columns.default_object_id = default_constraints.object_id
WHERE schemas.name = 'dbo'
AND tables.name = 'users'
AND all_columns.name = 'name')
IF @constraint IS NOT NULL EXEC('ALTER TABLE users DROP CONSTRAINT ' + @constraint)
ALTER TABLE [users] DROP COLUMN [name]alter table `users` add `first_name` varchar(255), add `last_name` varchar(255)
alter table `users` drop `name`alter table "users" add ("first_name" varchar2(255), "last_name" varchar2(255))
alter table "users" drop ("name")alter table "users" add column "first_name" varchar(255), add column "last_name" varchar(255)
alter table "users" drop column "name"alter table "users" add column "first_name" varchar(255)
alter table "users" add column "last_name" varchar(255)
alter table "users" drop column "name"alter table `users` add column `first_name` varchar(255)
alter table `users` add column `last_name` varchar(255)
PRAGMA table_info(`users`)createView
knex.schema.createView(tableName, callback)
Creates a new view on the database, with a callback function to modify the view's structure, using the schema-building commands.
knex.schema.createView('users_view', function (view) {
view.columns(['first_name']);
view.as(knex('users').select('first_name').where('age', '>', '18'));
});create view "users_view" ("first_name") as select "first_name" from "users" where "age" > '18'CREATE VIEW [users_view] ([first_name]) AS select [first_name] from [users] where [age] > '18'create view `users_view` (`first_name`) as select `first_name` from `users` where `age` > '18'create view "users_view" ("first_name") as select "first_name" from "users" where "age" > '18'create view "users_view" ("first_name") as select "first_name" from "users" where "age" > '18'create view "users_view" ("first_name") as select "first_name" from "users" where "age" > '18'create view `users_view` (`first_name`) as select `first_name` from `users` where `age` > '18'createViewOrReplace ~SQ
knex.schema.createViewOrReplace(tableName, callback)
Creates a new view or replaces it on the database, with a callback function to modify the view's structure, using the schema-building commands. You need to specify at least the same columns in the same order (you can add extra columns). In SQLite, this function generates drop/create view queries (view columns can be different).
knex.schema.createViewOrReplace('users_view', function (view) {
view.columns(['first_name']);
view.as(knex('users').select('first_name').where('age', '>', '18'));
});create or replace view "users_view" ("first_name") as select "first_name" from "users" where "age" > '18'CREATE OR ALTER VIEW [users_view] ([first_name]) AS select [first_name] from [users] where [age] > '18'create or replace view `users_view` (`first_name`) as select `first_name` from `users` where `age` > '18'create or replace view "users_view" ("first_name") as select "first_name" from "users" where "age" > '18'create or replace view "users_view" ("first_name") as select "first_name" from "users" where "age" > '18'create or replace view "users_view" ("first_name") as select "first_name" from "users" where "age" > '18'drop view if exists `users_view`
create view `users_view` (`first_name`) as select `first_name` from `users` where `age` > '18'createMaterializedView MYSQMS
knex.schema.createMaterializedView(viewName, callback)
Creates a new materialized view on the database, with a callback function to modify the view's structure, using the schema-building commands. Only supported on PostgreSQL, CockroachDB, Redshift and Oracle.
knex.schema.createMaterializedView('users_view', function (view) {
view.columns(['first_name']);
view.as(knex('users').select('first_name').where('age', '>', '18'));
});create materialized view "users_view" ("first_name") as select "first_name" from "users" where "age" > '18'Error: materialized views are not supported by this dialect.Error: materialized views are not supported by this dialect.create materialized view "users_view" ("first_name") as select "first_name" from "users" where "age" > '18'create materialized view "users_view" ("first_name") as select "first_name" from "users" where "age" > '18'create materialized view "users_view" ("first_name") as select "first_name" from "users" where "age" > '18'Error: materialized views are not supported by this dialect.refreshMaterializedView MYSQMS
knex.schema.refreshMaterializedView(viewName)
Refreshes a materialized view on the database. Only supported on PostgreSQL, CockroachDB, Redshift and Oracle.
knex.schema.refreshMaterializedView('users_view');refresh materialized view "users_view"Error: materialized views are not supported by this dialect.Error: materialized views are not supported by this dialect.BEGIN DBMS_MVIEW.REFRESH('users_view'); END;refresh materialized view "users_view"refresh materialized view "users_view"Error: materialized views are not supported by this dialect.dropView
knex.schema.dropView(viewName)
Drops a view on the database.
knex.schema.dropView('users_view');drop view "users_view"drop view [users_view]drop view `users_view`drop view "users_view"drop view "users_view"drop view "users_view"drop view `users_view`dropViewIfExists
knex.schema.dropViewIfExists(viewName)
Drops a view on the database if it exists.
knex.schema.dropViewIfExists('users_view');drop view if exists "users_view"if object_id('[users_view]', 'V') is not null DROP VIEW [users_view]drop view if exists `users_view`begin execute immediate 'drop view "users_view"'; exception when others then if sqlcode != -942 then raise; end if; end;
begin execute immediate 'drop sequence "users_view_seq"'; exception when others then if sqlcode != -2289 then raise; end if; end;drop view if exists "users_view"drop view if exists "users_view"drop view if exists `users_view`dropMaterializedView MYSQMS
knex.schema.dropMaterializedView(viewName)
Drops a materialized view on the database. Only supported on PostgreSQL, CockroachDB, Redshift and Oracle.
knex.schema.dropMaterializedView('users_view');drop materialized view "users_view"Error: materialized views are not supported by this dialect.Error: materialized views are not supported by this dialect.drop materialized view "users_view"drop materialized view "users_view"drop materialized view "users_view"Error: materialized views are not supported by this dialect.dropMaterializedViewIfExists MYSQMS
knex.schema.dropMaterializedViewIfExists(viewName)
Drops a materialized view on the database if it exists. Only supported on PostgreSQL, CockroachDB, Redshift and Oracle.
knex.schema.dropMaterializedViewIfExists('users_view');drop materialized view if exists "users_view"Error: materialized views are not supported by this dialect.Error: materialized views are not supported by this dialect.begin execute immediate 'drop materialized view "users_view"'; exception when others then if sqlcode != -12003 then raise; end if; end;
begin execute immediate 'drop sequence "users_view_seq"'; exception when others then if sqlcode != -2289 then raise; end if; end;drop materialized view if exists "users_view"drop materialized view if exists "users_view"Error: materialized views are not supported by this dialect.renameView ORSQ
knex.schema.renameView(viewName)
Renames an existing view in the database. Not supported by Oracle and SQLite.
knex.schema.renameView('users_view');alter view "users_view" rename to "undefined"Error: Undefined binding(s) detected for keys [1] when compiling RAW query: exec sp_rename ?, ?rename table `users_view` to `undefined`Error: rename view is not supported by this dialect (instead drop then create another view).alter view "users_view" rename to "undefined"alter view "users_view" rename to "undefined"Error: rename view is not supported by this dialect (instead drop then create another view).alterView MYSQORCR
knex.schema.alterView(viewName)
Alters a view to rename columns or change default values. Only available on PostgreSQL, MSSQL and Redshift.
knex.schema.alterView('view_test', function (view) {
view.column('first_name').rename('name_user');
view.column('bio').defaultTo('empty');
});Error: rename column of views is not supported by this dialect.Error: change default values of views is not supported by this dialect.Error: rename column of views is not supported by this dialect.Error: rename column of views is not supported by this dialect.alter view "view_test" rename "first_name" to "name_user"
alter view "view_test" alter "bio" set default emptyalter view "view_test" rename "first_name" to "name_user"
alter view "view_test" alter "bio" set default emptyError: rename column of views is not supported by this dialect.generateDdlCommands
knex.schema.generateDdlCommands()
Generates the complete SQL commands for applying the described schema changes, without executing anything. Useful when knex is being used purely as a query builder. Generally produces the same result as .toSQL(), with the notable exception of SQLite, which relies on asynchronous calls to the database to build part of its schema-modification statements.
const ddlCommands = knex.schema
.alterTable('users', (table) => {
table
.foreign('companyId')
.references('company.companyId')
.withKeyName('fk_fkey_company');
})
.generateDdlCommands();alter table "users" add constraint "fk_fkey_company" foreign key ("companyId") references "company" ("companyId")ALTER TABLE [users] ADD CONSTRAINT [fk_fkey_company] FOREIGN KEY ([companyId]) REFERENCES [company] ([companyId])alter table `users` add constraint `fk_fkey_company` foreign key (`companyId`) references `company` (`companyId`)alter table "users" add constraint "fk_fkey_company" foreign key ("companyId") references "company" ("companyId")alter table "users" add constraint "fk_fkey_company" foreign key ("companyId") references "company" ("companyId")alter table "users" add constraint "fk_fkey_company" foreign key ("companyId") references "company" ("companyId")CREATE TABLE `_knex_temp_alter922` (`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, `first_name` varchar(255), `last_name` varchar(255), `email` varchar(255), `age` integer, `companyId` integer, `user_id` integer, FOREIGN KEY (`companyId`) REFERENCES `company` (`companyId`), CONSTRAINT `fk_fkey_company` FOREIGN KEY (`companyId`) REFERENCES `company` (`companyId`))
INSERT INTO "_knex_temp_alter922" SELECT * FROM "users";
DROP TABLE "users"
ALTER TABLE "_knex_temp_alter922" RENAME TO "users"raw
knex.schema.raw(statement)
Run an arbitrary sql query in the schema builder chain.
knex.schema.raw("SET sql_mode='TRADITIONAL'").table('users', function (table) {
table.dropColumn('name');
table.string('first_name');
table.string('last_name');
});SET sql_mode='TRADITIONAL'
alter table "users" add column "first_name" varchar(255), add column "last_name" varchar(255)
alter table "users" drop column "name"SET sql_mode='TRADITIONAL'
ALTER TABLE [users] ADD [first_name] nvarchar(255), [last_name] nvarchar(255)
DECLARE @constraint varchar(100) = (SELECT default_constraints.name
FROM sys.all_columns
INNER JOIN sys.tables
ON all_columns.object_id = tables.object_id
INNER JOIN sys.schemas
ON tables.schema_id = schemas.schema_id
INNER JOIN sys.default_constraints
ON all_columns.default_object_id = default_constraints.object_id
WHERE schemas.name = 'dbo'
AND tables.name = 'users'
AND all_columns.name = 'name')
IF @constraint IS NOT NULL EXEC('ALTER TABLE users DROP CONSTRAINT ' + @constraint)
ALTER TABLE [users] DROP COLUMN [name]SET sql_mode='TRADITIONAL'
alter table `users` add `first_name` varchar(255), add `last_name` varchar(255)
alter table `users` drop `name`SET sql_mode='TRADITIONAL'
alter table "users" add ("first_name" varchar2(255), "last_name" varchar2(255))
alter table "users" drop ("name")SET sql_mode='TRADITIONAL'
alter table "users" add column "first_name" varchar(255), add column "last_name" varchar(255)
alter table "users" drop column "name"SET sql_mode='TRADITIONAL'
alter table "users" add column "first_name" varchar(255)
alter table "users" add column "last_name" varchar(255)
alter table "users" drop column "name"SET sql_mode='TRADITIONAL'
alter table `users` add column `first_name` varchar(255)
alter table `users` add column `last_name` varchar(255)
PRAGMA table_info(`users`)queryContext
knex.schema.queryContext(context)
Allows configuring a context to be passed to the wrapIdentifier hook. The context can be any kind of value and will be passed to wrapIdentifier without modification.
knex.schema.queryContext({ foo: 'bar' }).table('users', function (table) {
table.string('first_name');
table.string('last_name');
});alter table "users" add column "first_name" varchar(255), add column "last_name" varchar(255)ALTER TABLE [users] ADD [first_name] nvarchar(255), [last_name] nvarchar(255)alter table `users` add `first_name` varchar(255), add `last_name` varchar(255)alter table "users" add ("first_name" varchar2(255), "last_name" varchar2(255))alter table "users" add column "first_name" varchar(255), add column "last_name" varchar(255)alter table "users" add column "first_name" varchar(255)
alter table "users" add column "last_name" varchar(255)alter table `users` add column `first_name` varchar(255)
alter table `users` add column `last_name` varchar(255)The context configured will be passed to wrapIdentifier for each identifier that needs to be formatted, including the table and column names. However, a different context can be set for the column names via table.queryContext.
Calling queryContext with no arguments will return any context configured for the schema builder instance.
createSchema PG only
knex.schema.createSchema(schemaName)
Creates a new schema. Only supported by PostgreSQL.
//create schema 'public'
knex.schema.createSchema('public');createSchemaIfNotExists PG only
knex.schema.createSchemaIfNotExists(schemaName)
Creates a new schema conditionally if the schema doesn't exist. Only supported by PostgreSQL.
//create schema 'public'
knex.schema.createSchemaIfNotExists('public');dropSchema PG only
knex.schema.dropSchema(schemaName, [cascade])
Drop a schema, specified by the schema's name, with optional cascade option (default to false). Only supported by PostgreSQL.
//drop schema 'public'
knex.schema.dropSchema('public');
//drop schema 'public' cascade
knex.schema.dropSchema('public', true);drop schema "public"
-- ----
drop schema "public" cascadeError: dropSchema is not supported for this dialect (only PostgreSQL supports it currently).
-- ----
Error: dropSchema is not supported for this dialect (only PostgreSQL supports it currently).Error: dropSchema is not supported for this dialect (only PostgreSQL supports it currently).
-- ----
Error: dropSchema is not supported for this dialect (only PostgreSQL supports it currently).Error: dropSchema is not supported for this dialect (only PostgreSQL supports it currently).
-- ----
Error: dropSchema is not supported for this dialect (only PostgreSQL supports it currently).drop schema "public"
-- ----
drop schema "public" cascadedrop schema "public"
-- ----
drop schema "public" cascadeError: dropSchema is not supported for this dialect (only PostgreSQL supports it currently).
-- ----
Error: dropSchema is not supported for this dialect (only PostgreSQL supports it currently).dropSchemaIfExists PG only
knex.schema.dropSchemaIfExists(schemaName, [cascade])
Drop a schema conditionally if the schema exists, specified by the schema's name, with optional cascade option (default to false). Only supported by PostgreSQL.
//drop schema if exists 'public'
knex.schema.dropSchemaIfExists('public');
//drop schema if exists 'public' cascade
knex.schema.dropSchemaIfExists('public', true);drop schema if exists "public"
-- ----
drop schema if exists "public" cascadeError: dropSchemaIfExists is not supported for this dialect (only PostgreSQL supports it currently).
-- ----
Error: dropSchemaIfExists is not supported for this dialect (only PostgreSQL supports it currently).Error: dropSchemaIfExists is not supported for this dialect (only PostgreSQL supports it currently).
-- ----
Error: dropSchemaIfExists is not supported for this dialect (only PostgreSQL supports it currently).Error: dropSchemaIfExists is not supported for this dialect (only PostgreSQL supports it currently).
-- ----
Error: dropSchemaIfExists is not supported for this dialect (only PostgreSQL supports it currently).drop schema if exists "public"
-- ----
drop schema if exists "public" cascadedrop schema if exists "public"
-- ----
drop schema if exists "public" cascadeError: dropSchemaIfExists is not supported for this dialect (only PostgreSQL supports it currently).
-- ----
Error: dropSchemaIfExists is not supported for this dialect (only PostgreSQL supports it currently).Schema Building
dropColumn ~SQ
table.dropColumn(name)
Drops a column, specified by the column's name
knex.schema.table('users', function (table) {
table.dropColumn('name');
});alter table "users" drop column "name"
DECLARE @constraint varchar(100) = (SELECT default_constraints.name
FROM sys.all_columns
INNER JOIN sys.tables
ON all_columns.object_id = tables.object_id
INNER JOIN sys.schemas
ON tables.schema_id = schemas.schema_id
INNER JOIN sys.default_constraints
ON all_columns.default_object_id = default_constraints.object_id
WHERE schemas.name = 'dbo'
AND tables.name = 'users'
AND all_columns.name = 'name')
IF @constraint IS NOT NULL EXEC('ALTER TABLE users DROP CONSTRAINT ' + @constraint)
ALTER TABLE [users] DROP COLUMN [name]alter table `users` drop `name`alter table "users" drop ("name")alter table "users" drop column "name"alter table "users" drop column "name"PRAGMA table_info(`users`)dropColumns ~SQ
table.dropColumns(columns)
Drops multiple columns, taking a variable number of column names.
knex.schema.table('users', function (table) {
table.dropColumns('first_name', 'last_name');
});alter table "users" drop column "first_name", drop column "last_name"
DECLARE @constraint varchar(100) = (SELECT default_constraints.name
FROM sys.all_columns
INNER JOIN sys.tables
ON all_columns.object_id = tables.object_id
INNER JOIN sys.schemas
ON tables.schema_id = schemas.schema_id
INNER JOIN sys.default_constraints
ON all_columns.default_object_id = default_constraints.object_id
WHERE schemas.name = 'dbo'
AND tables.name = 'users'
AND all_columns.name = 'first_name')
IF @constraint IS NOT NULL EXEC('ALTER TABLE users DROP CONSTRAINT ' + @constraint)
DECLARE @constraint varchar(100) = (SELECT default_constraints.name
FROM sys.all_columns
INNER JOIN sys.tables
ON all_columns.object_id = tables.object_id
INNER JOIN sys.schemas
ON tables.schema_id = schemas.schema_id
INNER JOIN sys.default_constraints
ON all_columns.default_object_id = default_constraints.object_id
WHERE schemas.name = 'dbo'
AND tables.name = 'users'
AND all_columns.name = 'last_name')
IF @constraint IS NOT NULL EXEC('ALTER TABLE users DROP CONSTRAINT ' + @constraint)
ALTER TABLE [users] DROP COLUMN [first_name], [last_name]alter table `users` drop `first_name`, drop `last_name`alter table "users" drop ("first_name", "last_name")alter table "users" drop column "first_name", drop column "last_name"alter table "users" drop column "first_name", drop column "last_name"PRAGMA table_info(`users`)renameColumn
table.renameColumn(from, to)
Renames a column from one name to another.
knex.schema.table('users', function (table) {
table.renameColumn('name', 'username');
});alter table "users" rename "name" to "username"exec sp_rename '[users].name', 'username', 'COLUMN'alter table `users` change `name` `username` varchar(255) NULL COLLATE 'utf8mb4_0900_ai_ci'DECLARE PK_NAME VARCHAR(200); IS_AUTOINC NUMBER := 0; BEGIN EXECUTE IMMEDIATE ('ALTER TABLE "users" RENAME COLUMN "name" TO "username"'); SELECT COUNT(*) INTO IS_AUTOINC from "USER_TRIGGERS" where trigger_name = 'users_autoinc_trg'; IF (IS_AUTOINC > 0) THEN SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = 'P' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = 'users'; IF ('username' = PK_NAME) THEN EXECUTE IMMEDIATE ('DROP TRIGGER "users_autoinc_trg"'); EXECUTE IMMEDIATE ('create or replace trigger "users_autoinc_trg" BEFORE INSERT on "users" for each row declare checking number := 1; begin if (:new."username" is null) then while checking >= 1 loop select "users_seq".nextval into :new."username" from dual; select count("username") into checking from "users" where "username" = :new."username"; end loop; end if; end;'); end if; end if;END;alter table "users" rename "name" to "username"alter table "users" rename "name" to "username"alter table `users` rename `name` to `username`increments
table.increments(name, options={primaryKey: boolean = true})
Adds an auto-incrementing column. In PostgreSQL this is a serial; in Amazon Redshift an integer identity(1,1). This will be used as the primary key for the table if the column isn't part of another primary key. Also available is bigIncrements if you wish to add a bigint incrementing number (in PostgreSQL a bigserial). Note that a primary key is created by default if the column isn't already in a primary key (declared with the primary function), but you can override this behaviour by passing the primaryKey option. If you use this function together with the primary function, the column is added to the composite primary key. With SQLite, an autoincrement column needs to be a primary key, so if the primary function is used, primary keys are transformed into unique indexes. MySQL doesn't support autoincrement columns without a primary key, so multiple queries are generated to create an int column, add the increments column to the composite primary key, then modify the column to an autoincrement column.
// create table 'users'
// with a primary key using 'increments()'
knex.schema.createTable('users', function (table) {
table.increments('userId');
table.string('name');
});
// create table 'users'
// with a composite primary key ('userId', 'name').
// increments doesn't generate primary key.
knex.schema.createTable('users', function (table) {
table.primary(['userId', 'name']);
table.increments('userId');
table.string('name');
});
// reference the 'users' primary key in new table 'posts'
knex.schema.createTable('posts', function (table) {
table.integer('author').unsigned().notNullable();
table.string('title', 30);
table.string('content');
table.foreign('author').references('userId').inTable('users');
});create table "users" ("userId" serial primary key, "name" varchar(255))
-- ----
create table "users" ("userId" serial, "name" varchar(255), constraint "users_pkey" primary key ("userId", "name"))
-- ----
create table "posts" ("author" integer not null, "title" varchar(30), "content" varchar(255))
alter table "posts" add constraint "posts_author_foreign" foreign key ("author") references "users" ("userId")CREATE TABLE [users] ([userId] int identity(1,1) not null primary key, [name] nvarchar(255))
-- ----
CREATE TABLE [users] ([userId] int identity(1,1) not null, [name] nvarchar(255), CONSTRAINT [users_pkey] PRIMARY KEY ([userId], [name]))
-- ----
CREATE TABLE [posts] ([author] int not null, [title] nvarchar(30), [content] nvarchar(255), CONSTRAINT [posts_author_foreign] FOREIGN KEY ([author]) REFERENCES [users] ([userId]))create table `users` (`userId` int unsigned not null auto_increment primary key, `name` varchar(255))
-- ----
create table `users` (`userId` int unsigned not null, `name` varchar(255), primary key (`userId`, `name`))
alter table `users` modify column `userId` int unsigned not null auto_increment
-- ----
create table `posts` (`author` int unsigned not null, `title` varchar(30), `content` varchar(255))
alter table `posts` add constraint `posts_author_foreign` foreign key (`author`) references `users` (`userId`)create table "users" ("userId" integer not null primary key, "name" varchar2(255))
DECLARE PK_NAME VARCHAR(200); BEGIN EXECUTE IMMEDIATE ('CREATE SEQUENCE "users_seq"'); SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = 'P' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = 'users'; execute immediate ('create or replace trigger "users_autoinc_trg" BEFORE INSERT on "users" for each row declare checking number := 1; begin if (:new."' || PK_NAME || '" is null) then while checking >= 1 loop select "users_seq".nextval into :new."' || PK_NAME || '" from dual; select count("' || PK_NAME || '") into checking from "users" where "' || PK_NAME || '" = :new."' || PK_NAME || '"; end loop; end if; end;'); END;
-- ----
create table "users" ("userId" integer not null, "name" varchar2(255))
DECLARE PK_NAME VARCHAR(200); BEGIN EXECUTE IMMEDIATE ('CREATE SEQUENCE "users_seq"'); SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = 'P' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = 'users'; execute immediate ('create or replace trigger "users_autoinc_trg" BEFORE INSERT on "users" for each row declare checking number := 1; begin if (:new."' || PK_NAME || '" is null) then while checking >= 1 loop select "users_seq".nextval into :new."' || PK_NAME || '" from dual; select count("' || PK_NAME || '") into checking from "users" where "' || PK_NAME || '" = :new."' || PK_NAME || '"; end loop; end if; end;'); END;
alter table "users" add constraint "users_pkey" primary key ("userId", "name")
-- ----
create table "posts" ("author" integer not null, "title" varchar2(30), "content" varchar2(255))
alter table "posts" add constraint "posts_author_foreign" foreign key ("author") references "users" ("userId")create table "users" ("userId" serial primary key, "name" varchar(255))
-- ----
create table "users" ("userId" serial, "name" varchar(255), constraint "users_pkey" primary key ("userId", "name"))
-- ----
create table "posts" ("author" integer not null, "title" varchar(30), "content" varchar(255))
alter table "posts" add constraint "posts_author_foreign" foreign key ("author") references "users" ("userId")create table "users" ("userId" integer identity(1,1) primary key not null, "name" varchar(255))
-- ----
create table "users" ("userId" integer identity(1,1) primary key not null, "name" varchar(255))
-- ----
create table "posts" ("author" integer not null, "title" varchar(30), "content" varchar(255))
alter table "posts" add constraint "posts_author_foreign" foreign key ("author") references "users" ("userId")create table `users` (`userId` integer not null primary key autoincrement, `name` varchar(255))
-- ----
create table `users` (`userId` integer not null primary key autoincrement, `name` varchar(255), unique (`userId`, `name`))
-- ----
create table `posts` (`author` integer not null, `title` varchar(30), `content` varchar(255), foreign key(`author`) references `users`(`userId`))A primaryKey option may be passed, to disable the automatic primary key creation:
// create table 'users'
// with a primary key using 'increments()'
// but also increments field 'other_id'
// that does not need primary key
knex.schema.createTable('users', function (table) {
table.increments('id');
table.increments('other_id', { primaryKey: false });
});create table "users" ("id" serial primary key, "other_id" serial)CREATE TABLE [users] ([id] int identity(1,1) not null primary key, [other_id] int identity(1,1) not null)create table `users` (`id` int unsigned not null auto_increment primary key, `other_id` int unsigned not null)create table "users" ("id" integer not null primary key, "other_id" integer not null)
DECLARE PK_NAME VARCHAR(200); BEGIN EXECUTE IMMEDIATE ('CREATE SEQUENCE "users_seq"'); SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = 'P' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = 'users'; execute immediate ('create or replace trigger "users_autoinc_trg" BEFORE INSERT on "users" for each row declare checking number := 1; begin if (:new."' || PK_NAME || '" is null) then while checking >= 1 loop select "users_seq".nextval into :new."' || PK_NAME || '" from dual; select count("' || PK_NAME || '") into checking from "users" where "' || PK_NAME || '" = :new."' || PK_NAME || '"; end loop; end if; end;'); END;
DECLARE PK_NAME VARCHAR(200); BEGIN EXECUTE IMMEDIATE ('CREATE SEQUENCE "users_seq"'); SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = 'P' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = 'users'; execute immediate ('create or replace trigger "users_autoinc_trg" BEFORE INSERT on "users" for each row declare checking number := 1; begin if (:new."' || PK_NAME || '" is null) then while checking >= 1 loop select "users_seq".nextval into :new."' || PK_NAME || '" from dual; select count("' || PK_NAME || '") into checking from "users" where "' || PK_NAME || '" = :new."' || PK_NAME || '"; end loop; end if; end;'); END;create table "users" ("id" serial primary key, "other_id" serial)create table "users" ("id" integer identity(1,1) primary key not null, "other_id" integer identity(1,1) not null)create table `users` (`id` integer not null primary key autoincrement, `other_id` integer not null primary key autoincrement)integer
table.integer(name, length)
Adds an integer column. On PostgreSQL you cannot adjust the length; you need to use other options such as bigInteger, etc.
knex.schema.createTable('integer_example', function (table) {
table.integer('age');
});create table "integer_example" ("age" integer)CREATE TABLE [integer_example] ([age] int)create table `integer_example` (`age` int)create table "integer_example" ("age" integer)create table "integer_example" ("age" integer)create table "integer_example" ("age" integer)create table `integer_example` (`age` integer)bigInteger
table.bigInteger(name)
In MySQL or PostgreSQL, adds a bigint column, otherwise adds a normal integer. Note that bigint data is returned as a string in queries because JavaScript may be unable to parse them without loss of precision.
knex.schema.createTable('big_integer_example', function (table) {
table.bigInteger('total');
});create table "big_integer_example" ("total" bigint)CREATE TABLE [big_integer_example] ([total] bigint)create table `big_integer_example` (`total` bigint)create table "big_integer_example" ("total" number(20, 0))create table "big_integer_example" ("total" bigint)create table "big_integer_example" ("total" bigint)create table `big_integer_example` (`total` bigint)tinyint
table.tinyint(name, length)
Adds a tinyint column
knex.schema.createTable('tinyint_example', function (table) {
table.tinyint('flag');
});create table "tinyint_example" ("flag" smallint)CREATE TABLE [tinyint_example] ([flag] tinyint)create table `tinyint_example` (`flag` tinyint)create table "tinyint_example" ("flag" smallint)create table "tinyint_example" ("flag" smallint)create table "tinyint_example" ("flag" smallint)create table `tinyint_example` (`flag` tinyint)smallint
table.smallint(name)
Adds a smallint column
knex.schema.createTable('smallint_example', function (table) {
table.smallint('rank');
});create table "smallint_example" ("rank" smallint)CREATE TABLE [smallint_example] ([rank] smallint)create table `smallint_example` (`rank` smallint)create table "smallint_example" ("rank" smallint)create table "smallint_example" ("rank" smallint)create table "smallint_example" ("rank" smallint)create table `smallint_example` (`rank` integer)mediumint
table.mediumint(name)
Adds a mediumint column
knex.schema.createTable('mediumint_example', function (table) {
table.mediumint('counter');
});create table "mediumint_example" ("counter" integer)CREATE TABLE [mediumint_example] ([counter] int)create table `mediumint_example` (`counter` mediumint)create table "mediumint_example" ("counter" integer)create table "mediumint_example" ("counter" integer)create table "mediumint_example" ("counter" integer)create table `mediumint_example` (`counter` integer)bigint
table.bigint(name)
Adds a bigint column
knex.schema.createTable('bigint_example', function (table) {
table.bigint('counter');
});create table "bigint_example" ("counter" bigint)CREATE TABLE [bigint_example] ([counter] bigint)create table `bigint_example` (`counter` bigint)create table "bigint_example" ("counter" number(20, 0))create table "bigint_example" ("counter" bigint)create table "bigint_example" ("counter" bigint)create table `bigint_example` (`counter` bigint)text
table.text(name, [textType])
Adds a text column, with optional textType for MySQL text datatype preference. textType may be mediumtext or longtext, otherwise defaults to text.
knex.schema.createTable('text_example', function (table) {
table.text('bio');
});create table "text_example" ("bio" text)CREATE TABLE [text_example] ([bio] nvarchar(max))create table `text_example` (`bio` text)create table "text_example" ("bio" clob)create table "text_example" ("bio" text)create table "text_example" ("bio" varchar(max))create table `text_example` (`bio` text)string
table.string(name, [length])
Adds a string column, with optional length defaulting to 255.
knex.schema.createTable('string_example', function (table) {
table.string('name', 100);
});create table "string_example" ("name" varchar(100))CREATE TABLE [string_example] ([name] nvarchar(100))create table `string_example` (`name` varchar(100))create table "string_example" ("name" varchar2(100))create table "string_example" ("name" varchar(100))create table "string_example" ("name" varchar(100))create table `string_example` (`name` varchar(100))float
table.float(column, [precision], [scale])
Adds a float column, with optional precision (defaults to 8) and scale (defaults to 2).
knex.schema.createTable('float_example', function (table) {
table.float('rating', 5, 2);
});create table "float_example" ("rating" real)CREATE TABLE [float_example] ([rating] float)create table `float_example` (`rating` float(5, 2))create table "float_example" ("rating" float(5))create table "float_example" ("rating" real)create table "float_example" ("rating" real)create table `float_example` (`rating` float)double
table.double(column, [precision], [scale])
Adds a double column, with optional precision (defaults to 8) and scale (defaults to 2). In SQLite/MSSQL this is a float with no precision/scale; In PostgreSQL this is a double precision; In Oracle this is a number with matching precision/scale.
knex.schema.createTable('double_example', function (table) {
table.double('score', 8, 2);
});create table "double_example" ("score" double precision)CREATE TABLE [double_example] ([score] float)create table `double_example` (`score` double(8, 2))create table "double_example" ("score" number(8, 2))create table "double_example" ("score" double precision)create table "double_example" ("score" double precision)create table `double_example` (`score` float)decimal
table.decimal(column, [precision], [scale])
Adds a decimal column, with optional precision (defaults to 8) and scale (defaults to 2). Specifying NULL as precision creates a decimal column that can store numbers of any precision and scale. (Only supported for Oracle, SQLite, Postgres)
knex.schema.createTable('decimal_example', function (table) {
table.decimal('amount', 8, 2);
});create table "decimal_example" ("amount" decimal(8, 2))CREATE TABLE [decimal_example] ([amount] decimal(8, 2))create table `decimal_example` (`amount` decimal(8, 2))create table "decimal_example" ("amount" decimal(8, 2))create table "decimal_example" ("amount" decimal(8, 2))create table "decimal_example" ("amount" decimal(8, 2))create table `decimal_example` (`amount` float)boolean
table.boolean(name)
Adds a boolean column.
knex.schema.createTable('boolean_example', function (table) {
table.boolean('is_active');
});create table "boolean_example" ("is_active" boolean)CREATE TABLE [boolean_example] ([is_active] bit)create table `boolean_example` (`is_active` boolean)create table "boolean_example" ("is_active" number(1, 0) check ("is_active" in ('0', '1')))create table "boolean_example" ("is_active" boolean)create table "boolean_example" ("is_active" boolean)create table `boolean_example` (`is_active` boolean)date
table.date(name)
Adds a date column.
knex.schema.createTable('date_example', function (table) {
table.date('birthdate');
});create table "date_example" ("birthdate" date)CREATE TABLE [date_example] ([birthdate] date)create table `date_example` (`birthdate` date)create table "date_example" ("birthdate" date)create table "date_example" ("birthdate" date)create table "date_example" ("birthdate" date)create table `date_example` (`birthdate` date)datetime
table.datetime(name, options={[useTz: boolean], [precision: number]})
Adds a datetime column. By default PostgreSQL creates a column with timezone (timestamptz type). This behaviour can be overridden by passing the useTz option (which is by default true for PostgreSQL). MySQL and MSSQL do not have the useTz option.
A precision option may be passed:
knex.schema.createTable('events', (table) => {
table.datetime('some_time', { precision: 6 }).defaultTo(knex.fn.now(6));
});create table "events" ("some_time" timestamptz(6) default CURRENT_TIMESTAMP(6))CREATE TABLE [events] ([some_time] datetime2 CONSTRAINT [events_some_time_default] DEFAULT CURRENT_TIMESTAMP(6))create table `events` (`some_time` datetime(6) default CURRENT_TIMESTAMP(6))create table "events" ("some_time" timestamp default CURRENT_TIMESTAMP(6))create table "events" ("some_time" timestamptz(6) default CURRENT_TIMESTAMP(6))create table "events" ("some_time" timestamp default CURRENT_TIMESTAMP(6))create table `events` (`some_time` datetime default CURRENT_TIMESTAMP(6))time RS
table.time(name, [precision])
Adds a time column, with optional precision for MySQL. Not supported on Amazon Redshift.
In MySQL a precision option may be passed:
knex.schema.createTable('events', (table) => {
table.time('some_time', { precision: 6 });
});create table "events" ("some_time" time)CREATE TABLE [events] ([some_time] time)create table `events` (`some_time` time(6))create table "events" ("some_time" timestamp with local time zone)create table "events" ("some_time" time)create table "events" ("some_time" time)create table `events` (`some_time` time)timestamp
table.timestamp(name, options={[useTz: boolean], [precision: number]})
Adds a timestamp column. By default PostgreSQL creates a column with timezone (timestamptz type) and MSSQL does not (datetime2). This behaviour can be overridden by passing the useTz option (which is by default false for MSSQL and true for PostgreSQL). MySQL does not have the useTz option.
knex.schema.createTable('events', (table) => {
table.timestamp('created_at').defaultTo(knex.fn.now());
});create table "events" ("created_at" timestamptz default CURRENT_TIMESTAMP)CREATE TABLE [events] ([created_at] datetime2 CONSTRAINT [events_created_at_default] DEFAULT CURRENT_TIMESTAMP)create table `events` (`created_at` timestamp default CURRENT_TIMESTAMP)create table "events" ("created_at" timestamp with local time zone default CURRENT_TIMESTAMP)create table "events" ("created_at" timestamptz default CURRENT_TIMESTAMP)create table "events" ("created_at" timestamptz default CURRENT_TIMESTAMP)create table `events` (`created_at` datetime default CURRENT_TIMESTAMP)In PostgreSQL and MySQL a precision option may be passed:
knex.schema.createTable('events', (table) => {
table.timestamp('created_at', { precision: 6 }).defaultTo(knex.fn.now(6));
});create table "events" ("created_at" timestamptz(6) default CURRENT_TIMESTAMP(6))CREATE TABLE [events] ([created_at] datetime2 CONSTRAINT [events_created_at_default] DEFAULT CURRENT_TIMESTAMP(6))create table `events` (`created_at` timestamp(6) default CURRENT_TIMESTAMP(6))create table "events" ("created_at" timestamp default CURRENT_TIMESTAMP(6))create table "events" ("created_at" timestamptz(6) default CURRENT_TIMESTAMP(6))create table "events" ("created_at" timestamp default CURRENT_TIMESTAMP(6))create table `events` (`created_at` datetime default CURRENT_TIMESTAMP(6))In PostgreSQL and MSSQL a timezone option may be passed:
knex.schema.createTable('events', (table) => {
table.timestamp('created_at', { useTz: true });
});create table "events" ("created_at" timestamptz)CREATE TABLE [events] ([created_at] datetimeoffset)create table `events` (`created_at` timestamp)create table "events" ("created_at" timestamp with local time zone)create table "events" ("created_at" timestamptz)create table "events" ("created_at" timestamp)create table `events` (`created_at` datetime)timestamps
table.timestamps([useTimestamps], [defaultToNow], [useCamelCase])
Adds created_at and updated_at columns on the database, setting each to datetime types. When true is passed as the first argument a timestamp type is used instead. Both columns default to being not null and using the current timestamp when true is passed as the second argument. Note that on MySQL the .timestamps() only has seconds precision; to get better precision use the .datetime or .timestamp methods directly with precision. If useCamelCase is true, the columns are named createdAt and updatedAt.
INFO
PostgreSQL updated_at field will not automatically be updated. Please see this issue for details
knex.schema.createTable('timestamps_example', function (table) {
table.timestamps(true, true);
});create table "timestamps_example" ("created_at" timestamptz not null default CURRENT_TIMESTAMP, "updated_at" timestamptz not null default CURRENT_TIMESTAMP)CREATE TABLE [timestamps_example] ([created_at] datetime2 not null CONSTRAINT [timestamps_example_created_at_default] DEFAULT CURRENT_TIMESTAMP, [updated_at] datetime2 not null CONSTRAINT [timestamps_example_updated_at_default] DEFAULT CURRENT_TIMESTAMP)create table `timestamps_example` (`created_at` timestamp not null default CURRENT_TIMESTAMP, `updated_at` timestamp not null default CURRENT_TIMESTAMP)create table "timestamps_example" ("created_at" timestamp with local time zone default CURRENT_TIMESTAMP not null, "updated_at" timestamp with local time zone default CURRENT_TIMESTAMP not null)create table "timestamps_example" ("created_at" timestamptz not null default CURRENT_TIMESTAMP, "updated_at" timestamptz not null default CURRENT_TIMESTAMP)create table "timestamps_example" ("created_at" timestamptz not null default CURRENT_TIMESTAMP, "updated_at" timestamptz not null default CURRENT_TIMESTAMP)create table `timestamps_example` (`created_at` datetime not null default CURRENT_TIMESTAMP, `updated_at` datetime not null default CURRENT_TIMESTAMP)dropTimestamps
table.dropTimestamps([useCamelCase])
Drops the columns created_at and updated_at from the table, which can be created via timestamps. If useCamelCase is true, the columns are named createdAt and updatedAt.
knex.schema.table('timestamps_example', function (table) {
table.dropTimestamps();
});alter table "timestamps_example" drop column "created_at", drop column "updated_at"
DECLARE @constraint varchar(100) = (SELECT default_constraints.name
FROM sys.all_columns
INNER JOIN sys.tables
ON all_columns.object_id = tables.object_id
INNER JOIN sys.schemas
ON tables.schema_id = schemas.schema_id
INNER JOIN sys.default_constraints
ON all_columns.default_object_id = default_constraints.object_id
WHERE schemas.name = 'dbo'
AND tables.name = 'timestamps_example'
AND all_columns.name = 'created_at')
IF @constraint IS NOT NULL EXEC('ALTER TABLE timestamps_example DROP CONSTRAINT ' + @constraint)
DECLARE @constraint varchar(100) = (SELECT default_constraints.name
FROM sys.all_columns
INNER JOIN sys.tables
ON all_columns.object_id = tables.object_id
INNER JOIN sys.schemas
ON tables.schema_id = schemas.schema_id
INNER JOIN sys.default_constraints
ON all_columns.default_object_id = default_constraints.object_id
WHERE schemas.name = 'dbo'
AND tables.name = 'timestamps_example'
AND all_columns.name = 'updated_at')
IF @constraint IS NOT NULL EXEC('ALTER TABLE timestamps_example DROP CONSTRAINT ' + @constraint)
ALTER TABLE [timestamps_example] DROP COLUMN [created_at], [updated_at]alter table `timestamps_example` drop `created_at`, drop `updated_at`alter table "timestamps_example" drop ("created_at", "updated_at")alter table "timestamps_example" drop column "created_at", drop column "updated_at"alter table "timestamps_example" drop column "created_at", drop column "updated_at"Error: TypeError: first.toLowerCase is not a functionbinary
table.binary(name, [length])
Adds a binary column, with optional length argument for MySQL.
knex.schema.createTable('binary_example', function (table) {
table.binary('payload');
});create table "binary_example" ("payload" bytea)CREATE TABLE [binary_example] ([payload] varbinary(max))create table `binary_example` (`payload` blob)create table "binary_example" ("payload" blob)create table "binary_example" ("payload" bytea)create table "binary_example" ("payload" varchar(max))create table `binary_example` (`payload` blob)enum / enu
table.enu(col, values, [options])
Adds an enum column (aliased to enu, as enum is a reserved word in JavaScript). Implemented as unchecked varchar(255) on Amazon Redshift. Note that the second argument is an array of values. Example:
knex.schema.createTable('users', (table) => {
table.enu('column', ['value1', 'value2']);
});create table "users" ("column" text check ("column" in ('value1', 'value2')))CREATE TABLE [users] ([column] nvarchar(100))create table `users` (`column` enum('value1', 'value2'))create table "users" ("column" varchar2(6) check ("column" in ('value1', 'value2')))create table "users" ("column" text check ("column" in ('value1', 'value2')))create table "users" ("column" varchar(255))create table `users` (`column` text check (`column` in ('value1', 'value2')))For Postgres, an additional options argument can be provided to specify whether or not to use Postgres's native TYPE:
knex.schema.createTable('users', (table) => {
table.enu('column', ['value1', 'value2'], {
useNative: true,
enumName: 'foo_type',
});
});create type "foo_type" as enum ('value1', 'value2')
create table "users" ("column" "foo_type")CREATE TABLE [users] ([column] nvarchar(100))create table `users` (`column` enum('value1', 'value2'))create table "users" ("column" varchar2(6) check ("column" in ('value1', 'value2')))create type "foo_type" as enum ('value1', 'value2')
create table "users" ("column" "foo_type")create table "users" ("column" varchar(255))create table `users` (`column` text check (`column` in ('value1', 'value2')))It will use the values provided to generate the appropriate TYPE. Example:
CREATE TYPE "foo_type" AS ENUM ('value1', 'value2');To use an existing native type across columns, specify 'existingType' in the options (this assumes the type has already been created):
INFO
Since the enum values aren't utilized for a native and existing type, the type being passed in for values is immaterial.
knex.schema.createTable('users', (table) => {
table.enu('column', ['value1'], {
useNative: true,
existingType: true,
enumName: 'foo_type',
});
});create table "users" ("column" "foo_type")CREATE TABLE [users] ([column] nvarchar(100))create table `users` (`column` enum('value1'))create table "users" ("column" varchar2(6) check ("column" in ('value1')))create table "users" ("column" "foo_type")create table "users" ("column" varchar(255))create table `users` (`column` text check (`column` in ('value1')))If you want to use existing enums from a schema, different from the schema of your current table, specify 'schemaName' in the options:
knex.schema.createTable('users', (table) => {
table.enu('column', ['value1'], {
useNative: true,
existingType: true,
enumName: 'foo_type',
schemaName: 'public',
});
});create table "users" ("column" "public"."foo_type")CREATE TABLE [users] ([column] nvarchar(100))create table `users` (`column` enum('value1'))create table "users" ("column" varchar2(6) check ("column" in ('value1')))create table "users" ("column" "public"."foo_type")create table "users" ("column" varchar(255))create table `users` (`column` text check (`column` in ('value1')))Knex does not provide any way to alter enumerations after creation. To change an enumeration later on you must use Knex.raw, and the appropriate command for your database.
json
table.json(name)
Adds a json column, using the built-in json type in PostgreSQL, MySQL and SQLite, defaulting to a text column in older versions or in unsupported databases.
For PostgreSQL, due to incompatibility between native array and json types, when setting an array (or a value that could be an array) as the value of a json or jsonb column, you should use JSON.stringify() to convert your value to a string prior to passing it to the query builder, e.g.
knex
.table('users')
.where({ id: 1 })
.update({ json_data: JSON.stringify(['a', 'b']) });update "users" set "json_data" = '["a","b"]' where "id" = 1update [users] set [json_data] = '["a","b"]' where [id] = 1;select @@rowcountupdate `users` set `json_data` = '[\"a\",\"b\"]' where `id` = 1update "users" set "json_data" = '["a","b"]' where "id" = 1update "users" set "json_data" = '["a","b"]' where "id" = 1update "users" set "json_data" = '["a","b"]' where "id" = 1update `users` set `json_data` = '["a","b"]' where `id` = 1jsonb
table.jsonb(name)
Adds a jsonb column. Works similar to table.json(), but uses native jsonb type if possible.
knex.schema.createTable('jsonb_example', function (table) {
table.jsonb('metadata');
});create table "jsonb_example" ("metadata" jsonb)CREATE TABLE [jsonb_example] ([metadata] nvarchar(max))create table `jsonb_example` (`metadata` json)create table "jsonb_example" ("metadata" varchar2(4000) check ("metadata" is json))create table "jsonb_example" ("metadata" jsonb)create table "jsonb_example" ("metadata" varchar(max))create table `jsonb_example` (`metadata` json)uuid RS
table.uuid(name, options={[useBinaryUuid: boolean], [primaryKey: boolean]})
Adds a uuid column - this uses the built-in uuid type in PostgreSQL, falling back to a char(36) in other databases by default. If useBinaryUuid is true, binary(16) is used. See the uuidToBin function to convert a uuid to binary before inserting, and binToUuid to convert a binary uuid back to a uuid. If primaryKey is true, then for PostgreSQL the field will be configured as a uuid primary key; for CockroachDB an additional default gen_random_uuid() is set on the type.
You may set the default value to the uuid helper function. Not supported by Redshift.
knex.schema.createTable('uuid_table', (table) => {
table.uuid('uuidColumn').defaultTo(knex.fn.uuid());
});create table "uuid_table" ("uuidColumn" uuid default (gen_random_uuid()))CREATE TABLE [uuid_table] ([uuidColumn] uniqueidentifier CONSTRAINT [uuid_table_uuidcolumn_default] DEFAULT (NEWID()))create table `uuid_table` (`uuidColumn` char(36) default (UUID()))create table "uuid_table" ("uuidColumn" char(36) default (random_uuid()))create table "uuid_table" ("uuidColumn" uuid default (gen_random_uuid()))Error: pg-redshift does not have a uuid functioncreate table `uuid_table` (`uuidColumn` char(36) default (lower(hex(randomblob(4))) || '-' || lower(hex(randomblob(2))) || '-4' || substr(lower(hex(randomblob(2))),2) || '-' || substr('89ab',abs(random()) % 4 + 1, 1) || substr(lower(hex(randomblob(2))),2) || '-' || lower(hex(randomblob(6)))))geometry MYORCRRS
table.geometry(name)
Adds a geometry column. Supported by SQLite, MSSQL and PostgreSQL.
knex.schema.createTable('geo_table', (table) => {
table.geometry('geometryColumn');
});create table "geo_table" ("geometryColumn" geometry)CREATE TABLE [geo_table] ([geometryColumn] geometry)create table `geo_table` (`geometryColumn` geometry)create table "geo_table" ("geometryColumn" geometry)create table "geo_table" ("geometryColumn" geometry)create table "geo_table" ("geometryColumn" geometry)create table `geo_table` (`geometryColumn` geometry)geography MYORCRRS
table.geography(name)
Adds a geography column. Supported by SQLite, MSSQL and PostgreSQL (in PostGIS extension).
knex.schema.createTable('geo_table', (table) => {
table.geography('geographyColumn');
});create table "geo_table" ("geographyColumn" geography)CREATE TABLE [geo_table] ([geographyColumn] geography)create table `geo_table` (`geographyColumn` geography)create table "geo_table" ("geographyColumn" geography)create table "geo_table" ("geographyColumn" geography)create table "geo_table" ("geographyColumn" geography)create table `geo_table` (`geographyColumn` geography)point CRMS
table.point(name)
Adds a point column. Not supported by CockroachDB and MSSQL.
knex.schema.createTable('point_table', (table) => {
table.point('pointColumn');
});create table "point_table" ("pointColumn" point)CREATE TABLE [point_table] ([pointColumn] point)create table `point_table` (`pointColumn` point)create table "point_table" ("pointColumn" point)create table "point_table" ("pointColumn" point)create table "point_table" ("pointColumn" point)create table `point_table` (`pointColumn` point)comment
table.comment(value)
Sets the comment for a table.
knex.schema.createTable('comment_example', function (table) {
table.increments('id');
table.comment('Stores example rows');
});create table "comment_example" ("id" serial primary key)
comment on table "comment_example" is 'Stores example rows'CREATE TABLE [comment_example] ([id] int identity(1,1) not null primary key)
IF EXISTS(SELECT * FROM sys.fn_listextendedproperty(N'MS_Description', N'Schema', N'dbo', N'Table', N'comment_example', NULL, NULL))
EXEC sys.sp_updateextendedproperty N'MS_Description', N'Stores example rows', N'Schema', N'dbo', N'Table', N'comment_example'
ELSE
EXEC sys.sp_addextendedproperty N'MS_Description', N'Stores example rows', N'Schema', N'dbo', N'Table', N'comment_example'create table `comment_example` (`id` int unsigned not null auto_increment primary key) comment = 'Stores example rows'create table "comment_example" ("id" integer not null primary key)
comment on table "comment_example" is 'Stores example rows'
DECLARE PK_NAME VARCHAR(200); BEGIN EXECUTE IMMEDIATE ('CREATE SEQUENCE "comment_example_seq"'); SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = 'P' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = 'comment_example'; execute immediate ('create or replace trigger "comment_example_autoinc_trg" BEFORE INSERT on "comment_example" for each row declare checking number := 1; begin if (:new."' || PK_NAME || '" is null) then while checking >= 1 loop select "comment_example_seq".nextval into :new."' || PK_NAME || '" from dual; select count("' || PK_NAME || '") into checking from "comment_example" where "' || PK_NAME || '" = :new."' || PK_NAME || '"; end loop; end if; end;'); END;create table "comment_example" ("id" serial primary key)
comment on table "comment_example" is 'Stores example rows'create table "comment_example" ("id" integer identity(1,1) primary key not null)
comment on table "comment_example" is 'Stores example rows'create table `comment_example` (`id` integer not null primary key autoincrement)engine MY only
table.engine(val)
Sets the engine for the database table, only available within a createTable call, and only applicable to MySQL.
knex.schema.createTable('engine_example', function (table) {
table.increments('id');
table.engine('InnoDB');
});Error: Knex only supports engine statement with mysql.Error: Knex only supports engine statement with mysql.create table `engine_example` (`id` int unsigned not null auto_increment primary key) engine = InnoDBError: Knex only supports engine statement with mysql.Error: Knex only supports engine statement with mysql.Error: Knex only supports engine statement with mysql.Error: Knex only supports engine statement with mysql.charset MY only
table.charset(val)
Sets the charset for the database table, only available within a createTable call, and only applicable to MySQL.
knex.schema.createTable('charset_example', function (table) {
table.increments('id');
table.charset('utf8mb4');
});Error: Knex only supports charset statement with mysql.Error: Knex only supports charset statement with mysql.create table `charset_example` (`id` int unsigned not null auto_increment primary key) default character set utf8mb4Error: Knex only supports charset statement with mysql.Error: Knex only supports charset statement with mysql.Error: Knex only supports charset statement with mysql.Error: Knex only supports charset statement with mysql.collate MY only
table.collate(val)
Sets the collation for the database table, only available within a createTable call, and only applicable to MySQL.
knex.schema.createTable('collate_example', function (table) {
table.increments('id');
table.collate('utf8mb4_unicode_ci');
});Error: Knex only supports collate statement with mysql.Error: Knex only supports collate statement with mysql.create table `collate_example` (`id` int unsigned not null auto_increment primary key) collate utf8mb4_unicode_ciError: Knex only supports collate statement with mysql.Error: Knex only supports collate statement with mysql.Error: Knex only supports collate statement with mysql.Error: Knex only supports collate statement with mysql.inherits PG only
table.inherits(val)
Sets the tables that this table inherits, only available within a createTable call, and only applicable to PostgreSQL.
knex.schema.createTable('inherits_example', function (table) {
table.increments('id');
table.inherits('parent_table');
});create table "inherits_example" ("id" serial primary key) inherits ("parent_table")Error: Knex only supports inherits statement with postgresql.Error: Knex only supports inherits statement with postgresql.Error: Knex only supports inherits statement with postgresql.create table "inherits_example" ("id" serial primary key) inherits ("parent_table")Error: Knex only supports inherits statement with postgresql.Error: Knex only supports inherits statement with postgresql.specificType
table.specificType(name, type)
Sets a specific type for the column creation, if you'd like to add a column type that isn't supported here.
knex.schema.createTable('specific_type_example', function (table) {
table.specificType('ip_address', 'inet');
});create table "specific_type_example" ("ip_address" inet)CREATE TABLE [specific_type_example] ([ip_address] inet)create table `specific_type_example` (`ip_address` inet)create table "specific_type_example" ("ip_address" inet)create table "specific_type_example" ("ip_address" inet)create table "specific_type_example" ("ip_address" inet)create table `specific_type_example` (`ip_address` inet)index RS
table.index(columns, [indexName], options=({[indexType: string], [storageEngineIndexType: 'btree'|'hash'], [predicate: QueryBuilder]}))
Adds an index to a table over the given columns. A default index name using the columns is used unless indexName is specified. In MySQL, the storage engine index type may be 'btree' or 'hash' index types, more info in Index Options section : https://dev.mysql.com/doc/refman/8.0/en/create-index.html. The indexType can be optionally specified for PostgreSQL and MySQL. Amazon Redshift does not allow creating an index. In PostgreSQL, SQLite and MSSQL a partial index can be specified by setting a 'where' predicate.
knex.schema.table('users', function (table) {
table.index(['name', 'last_name'], 'idx_name_last_name', {
indexType: 'FULLTEXT',
storageEngineIndexType: 'hash',
predicate: knex.whereNotNull('email'),
});
});create index "idx_name_last_name" on "users" using hash ("name", "last_name") where "email" is not nullCREATE INDEX [idx_name_last_name] ON [users] ([name], [last_name]) where [email] is not nullalter table `users` add FULLTEXT index `idx_name_last_name`(`name`, `last_name`) using hashcreate index "idx_name_last_name" on "users" ("name", "last_name")create index "idx_name_last_name" on "users" using hash ("name", "last_name") where "email" is not nullcreate index `idx_name_last_name` on `users` (`name`, `last_name`) where `email` is not nulldropIndex RS
table.dropIndex(columns, [indexName])
Drops an index from a table. A default index name using the columns is used unless indexName is specified (in which case columns is ignored). Amazon Redshift does not allow creating an index.
knex.schema.table('users', function (table) {
table.dropIndex(['name', 'last_name']);
});drop index "users_name_last_name_index"DROP INDEX [users_name_last_name_index] ON [users]alter table `users` drop index `users_name_last_name_index`drop index "users_name_last_name_index"drop index "users_name_last_name_index"drop index `users_name_last_name_index`setNullable ~SQ
table.setNullable(column)
Makes table column nullable.
knex.schema.table('users', function (table) {
table.setNullable('email');
});alter table "users" alter column "email" drop not nullalter table [users] alter column [email] nvarchar(255) nullalter table `users` modify `email` varchar(255) nullalter table "users" modify ("email" NULL)alter table "users" alter column "email" drop not nullalter table "users" alter column "email" drop not nullCREATE TABLE `_knex_temp_alter076` (`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, `email` varchar(255), `age` integer, `companyId` integer, `user_id` integer, FOREIGN KEY (`companyId`) REFERENCES `company` (`companyId`))
INSERT INTO "_knex_temp_alter076" SELECT * FROM "users";
DROP TABLE "users"
ALTER TABLE "_knex_temp_alter076" RENAME TO "users"dropNullable ~SQ
table.dropNullable(column)
Makes table column not nullable. Note that this operation will fail if there are already null values in this column.
knex.schema.table('users', function (table) {
table.dropNullable('email');
});alter table "users" alter column "email" set not nullalter table [users] alter column [email] nvarchar(255) not nullalter table `users` modify `email` varchar(255) not nullalter table "users" modify ("email" NOT NULL)alter table "users" alter column "email" set not nullalter table "users" alter column "email" set not nullCREATE TABLE `_knex_temp_alter215` (`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, `email` varchar(255) NOT NULL, `age` integer, `companyId` integer, `user_id` integer, FOREIGN KEY (`companyId`) REFERENCES `company` (`companyId`))
INSERT INTO "_knex_temp_alter215" SELECT * FROM "users";
DROP TABLE "users"
ALTER TABLE "_knex_temp_alter215" RENAME TO "users"primary ~SQ
table.primary(columns, options=({[constraintName:string],[deferrable:'not deferrable'|'deferred'|'immediate']}))
Creates a primary key constraint on table using input columns. If you need to create a composite primary key, pass an array of columns to columns. Constraint name defaults to tablename_pkey unless constraintName is specified. On Amazon Redshift, all columns included in a primary key must be not nullable. Deferrable primary constraints are supported on Postgres and Oracle and can be set by passing deferrable option to options object.
knex.schema.createTable('job', function (t) {
t.string('email');
t.primary('email', {
constraintName: 'users_primary_key',
deferrable: 'deferred',
});
});create table "job" ("email" varchar(255), constraint "users_primary_key" primary key ("email") deferrable initially deferred)CREATE TABLE [job] ([email] nvarchar(255), CONSTRAINT [users_primary_key] PRIMARY KEY ([email]))create table `job` (`email` varchar(255), constraint `users_primary_key` as `constraintName`, `deferred` as `deferrable` primary key (`email`))create table "job" ("email" varchar2(255))
alter table "job" add constraint "users_primary_key" primary key ("email") deferrable initially deferredcreate table "job" ("email" varchar(255), constraint "users_primary_key" primary key ("email") deferrable initially deferred)create table "job" ("email" varchar(255))create table `job` (`email` varchar(255), constraint `users_primary_key` as `constraintName`, `deferred` as `deferrable` primary key (`email`))INFO
If you want to chain primary() while creating new column you can use primary
unique
table.unique(columns, options={[indexName: string], [deferrable:'not deferrable'|'immediate'|'deferred'], [storageEngineIndexType:'btree'|'hash'], [useConstraint:true|false], [predicate: QueryBuilder]})
Adds an unique index to a table over the given columns. In MySQL, the storage engine index type may be 'btree' or 'hash' index types, more info in Index Options section: https://dev.mysql.com/doc/refman/8.0/en/create-index.html. A default index name using the columns is used unless indexName is specified. If you need to create a composite index, pass an array of columns to columns. Deferrable unique constraints are supported on Postgres and Oracle and can be set by passing deferrable option to options object. In MSSQL and Postgres, you can set the useConstraint option to true to create a unique constraint instead of a unique index (defaults to false for MSSQL, true for Postgres without predicate, false for Postgres with predicate). In PostgreSQL, SQLite and MSSQL a partial unique index can be specified by setting a 'where' predicate.
knex.schema.alterTable('users', function (t) {
t.unique('email');
});
knex.schema.alterTable('job', function (t) {
t.unique(['account_id', 'program_id'], {
indexName: 'job_composite_index',
deferrable: 'deferred',
storageEngineIndexType: 'hash',
});
});
knex.schema.alterTable('job', function (t) {
t.unique(['account_id', 'program_id'], {
indexName: 'job_composite_index',
useConstraint: true,
});
});
knex.schema.alterTable('job', function (t) {
t.unique(['account_id', 'program_id'], {
indexName: 'job_composite_index',
predicate: knex.whereNotNull('account_id'),
});
});alter table "users" add constraint "users_email_unique" unique ("email")
-- ----
alter table "job" add constraint "job_composite_index" unique ("account_id", "program_id") deferrable initially deferred
-- ----
alter table "job" add constraint "job_composite_index" unique ("account_id", "program_id")
-- ----
create unique index "job_composite_index" on "job" ("account_id", "program_id") where "account_id" is not nullCREATE UNIQUE INDEX [users_email_unique] ON [users] ([email]) WHERE [email] IS NOT NULL
-- ----
CREATE UNIQUE INDEX [job_composite_index] ON [job] ([account_id], [program_id]) WHERE [account_id] IS NOT NULL AND [program_id] IS NOT NULL
-- ----
ALTER TABLE [job] ADD CONSTRAINT [job_composite_index] UNIQUE ([account_id], [program_id])
-- ----
CREATE UNIQUE INDEX [job_composite_index] ON [job] ([account_id], [program_id]) where [account_id] is not nullalter table `users` add unique `users_email_unique`(`email`)
-- ----
alter table `job` add unique `job_composite_index`(`account_id`, `program_id`) using hash
-- ----
alter table `job` add unique `job_composite_index`(`account_id`, `program_id`)
-- ----
alter table `job` add unique `job_composite_index`(`account_id`, `program_id`)alter table "users" add constraint "users_email_unique" unique ("email")
-- ----
alter table "job" add constraint "job_composite_index" unique ("account_id", "program_id") deferrable initially deferred
-- ----
alter table "job" add constraint "job_composite_index" unique ("account_id", "program_id")
-- ----
alter table "job" add constraint "job_composite_index" unique ("account_id", "program_id")alter table "users" add constraint "users_email_unique" unique ("email")
-- ----
alter table "job" add constraint "job_composite_index" unique ("account_id", "program_id") deferrable initially deferred
-- ----
alter table "job" add constraint "job_composite_index" unique ("account_id", "program_id")
-- ----
create unique index "job_composite_index" on "job" ("account_id", "program_id") where "account_id" is not nullalter table "users" add constraint "users_email_unique" unique ("email")
-- ----
alter table "job" add constraint "job_composite_index" unique ("account_id", "program_id") deferrable initially deferred
-- ----
alter table "job" add constraint "job_composite_index" unique ("account_id", "program_id")
-- ----
create unique index "job_composite_index" on "job" ("account_id", "program_id") where "account_id" is not nullcreate unique index `users_email_unique` on `users` (`email`)
-- ----
create unique index `job_composite_index` on `job` (`account_id`, `program_id`)
-- ----
create unique index `job_composite_index` on `job` (`account_id`, `program_id`)
-- ----
create unique index `job_composite_index` on `job` (`account_id`, `program_id`) where `account_id` is not nullINFO
If you want to chain unique() while creating new column you can use unique
foreign ~SQ
table.foreign(columns, [foreignKeyName])[.onDelete(statement).onUpdate(statement).withKeyName(foreignKeyName).deferrable(type)]
Adds a foreign key constraint to a table for an existing column using table.foreign(column).references(column) or multiple columns using table.foreign(columns).references(columns).inTable(table).
A default key name using the columns is used unless foreignKeyName is specified.
You can also chain onDelete() and/or onUpdate() to set the reference option (RESTRICT, CASCADE, SET NULL, NO ACTION) for the operation. You can also chain withKeyName() to override default key name that is generated from table and column names (result is identical to specifying second parameter to function foreign()).
Deferrable foreign constraints are supported on Postgres, Oracle, and SQLite; calling .deferrable(type) will throw on MySQL, MSSQL, and Redshift.
Note that using foreign() is the same as column.references(column) but it works for existing columns.
knex.schema.table('users', function (table) {
table.integer('user_id').unsigned();
table.foreign('user_id').references('Items.user_id_in_items');
});alter table "users" add column "user_id" integer
alter table "users" add constraint "users_user_id_foreign" foreign key ("user_id") references "Items" ("user_id_in_items")ALTER TABLE [users] ADD [user_id] int
ALTER TABLE [users] ADD CONSTRAINT [users_user_id_foreign] FOREIGN KEY ([user_id]) REFERENCES [Items] ([user_id_in_items])alter table `users` add `user_id` int unsigned
alter table `users` add constraint `users_user_id_foreign` foreign key (`user_id`) references `Items` (`user_id_in_items`)alter table "users" add "user_id" integer
alter table "users" add constraint "users_user_id_foreign" foreign key ("user_id") references "Items" ("user_id_in_items")alter table "users" add column "user_id" integer
alter table "users" add constraint "users_user_id_foreign" foreign key ("user_id") references "Items" ("user_id_in_items")alter table "users" add column "user_id" integer
alter table "users" add constraint "users_user_id_foreign" foreign key ("user_id") references "Items" ("user_id_in_items")alter table `users` add column `user_id` integer
CREATE TABLE `_knex_temp_alter114` (`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, `email` varchar(255), `age` integer, `companyId` integer, `user_id` integer, FOREIGN KEY (`companyId`) REFERENCES `company` (`companyId`), FOREIGN KEY (`user_id`) REFERENCES `Items` (`user_id_in_items`))
INSERT INTO "_knex_temp_alter114" SELECT * FROM "users";
DROP TABLE "users"
ALTER TABLE "_knex_temp_alter114" RENAME TO "users"Deferrable example:
knex.schema.table('users', function (table) {
table
.foreign('user_id')
.references('Items.user_id_in_items')
.deferrable('deferred');
});alter table "users" add constraint "users_user_id_foreign" foreign key ("user_id") references "Items" ("user_id_in_items") deferrable initially deferredError: mssql does not support deferrableError: mysql does not support deferrablealter table "users" add constraint "users_user_id_foreign" foreign key ("user_id") references "Items" ("user_id_in_items") deferrable initially deferredalter table "users" add constraint "users_user_id_foreign" foreign key ("user_id") references "Items" ("user_id_in_items") deferrable initially deferredError: redshift does not support deferrableCREATE TABLE `_knex_temp_alter056` (`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, `email` varchar(255), `age` integer, `companyId` integer, `user_id` integer, FOREIGN KEY (`companyId`) REFERENCES `company` (`companyId`), FOREIGN KEY (`user_id`) REFERENCES `Items` (`user_id_in_items`))
INSERT INTO "_knex_temp_alter056" SELECT * FROM "users";
DROP TABLE "users"
ALTER TABLE "_knex_temp_alter056" RENAME TO "users"dropForeign ~SQ
table.dropForeign(columns, [foreignKeyName])
Drops a foreign key constraint from a table. A default foreign key name using the columns is used unless foreignKeyName is specified (in which case columns is ignored).
knex.schema.table('users', function (table) {
table.dropForeign('companyId');
});alter table "users" drop constraint "users_companyid_foreign"ALTER TABLE [users] DROP CONSTRAINT [users_companyid_foreign]alter table `users` drop foreign key `users_companyid_foreign`alter table "users" drop constraint "users_companyid_foreign"alter table "users" drop constraint "users_companyid_foreign"alter table "users" drop constraint "users_companyid_foreign"PRAGMA table_info(`users`)dropUnique
table.dropUnique(columns, [indexName])
Drops a unique key constraint from a table. A default unique key name using the columns is used unless indexName is specified (in which case columns is ignored).
knex.schema.table('job', function (table) {
table.dropUnique(['account_id', 'program_id'], 'job_composite_index');
});drop index "job"@"job_composite_index" cascadeDROP INDEX [job_composite_index] ON [job]alter table `job` drop index `job_composite_index`alter table "job" drop constraint "job_composite_index"alter table "job" drop constraint "job_composite_index"alter table "job" drop constraint "job_composite_index"drop index `job_composite_index`dropPrimary ~SQ
table.dropPrimary([constraintName])
Drops the primary key constraint on a table. Defaults to tablename_pkey unless constraintName is specified.
knex.schema.table('users', function (table) {
table.dropPrimary();
});alter table "users" drop constraint "users_pkey"ALTER TABLE [users] DROP CONSTRAINT [users_pkey]alter table `users` drop primary keyalter table "users" drop constraint "users_pkey"alter table "users" drop constraint "users_pkey"alter table "users" drop constraint "users_pkey"PRAGMA table_info(`users`)queryContext
table.queryContext(context)
Allows configuring a context to be passed to the wrapIdentifier hook for formatting table builder identifiers. The context can be any kind of value and will be passed to wrapIdentifier without modification.
knex.schema.table('users', function (table) {
table.queryContext({ foo: 'bar' });
table.string('first_name');
table.string('last_name');
});alter table "users" add column "first_name" varchar(255), add column "last_name" varchar(255)ALTER TABLE [users] ADD [first_name] nvarchar(255), [last_name] nvarchar(255)alter table `users` add `first_name` varchar(255), add `last_name` varchar(255)alter table "users" add ("first_name" varchar2(255), "last_name" varchar2(255))alter table "users" add column "first_name" varchar(255), add column "last_name" varchar(255)alter table "users" add column "first_name" varchar(255)
alter table "users" add column "last_name" varchar(255)alter table `users` add column `first_name` varchar(255)
alter table `users` add column `last_name` varchar(255)This method also enables overwriting the context configured for a schema builder instance via schema.queryContext:
knex.schema.queryContext('schema context').table('users', function (table) {
table.queryContext('table context');
table.string('first_name');
table.string('last_name');
});alter table "users" add column "first_name" varchar(255), add column "last_name" varchar(255)ALTER TABLE [users] ADD [first_name] nvarchar(255), [last_name] nvarchar(255)alter table `users` add `first_name` varchar(255), add `last_name` varchar(255)alter table "users" add ("first_name" varchar2(255), "last_name" varchar2(255))alter table "users" add column "first_name" varchar(255), add column "last_name" varchar(255)alter table "users" add column "first_name" varchar(255)
alter table "users" add column "last_name" varchar(255)alter table `users` add column `first_name` varchar(255)
alter table `users` add column `last_name` varchar(255)Note that it's also possible to overwrite the table builder context for any column in the table definition:
knex.schema.queryContext('schema context').table('users', function (table) {
table.queryContext('table context');
table.string('first_name').queryContext('first_name context');
table.string('last_name').queryContext('last_name context');
});alter table "users" add column "first_name" varchar(255), add column "last_name" varchar(255)ALTER TABLE [users] ADD [first_name] nvarchar(255), [last_name] nvarchar(255)alter table `users` add `first_name` varchar(255), add `last_name` varchar(255)alter table "users" add ("first_name" varchar2(255), "last_name" varchar2(255))alter table "users" add column "first_name" varchar(255), add column "last_name" varchar(255)alter table "users" add column "first_name" varchar(255)
alter table "users" add column "last_name" varchar(255)alter table `users` add column `first_name` varchar(255)
alter table `users` add column `last_name` varchar(255)Calling queryContext with no arguments will return any context configured for the table builder instance.
Chainable Methods
The following three methods may be chained on the schema building methods, as modifiers to the column.
alter ~SQRS
column.alter(options={[alterNullable: boolean = true], [alterType: boolean = true]})
Marks the column as an alter / modify, instead of the default add.
WARNING
This only works in .alterTable(). SQLite emulates this by rebuilding the table, and Amazon Redshift does not support it. Alter is not done incrementally over the older column type, so if you would like to add notNullable and keep the old default value, the alter statement must contain both .notNullable().defaultTo(1).alter(). If one just tries to add .notNullable().alter() the old default value will be dropped. Nullable alterations are done only if alterNullable is true. Type alterations are done only if alterType is true.
knex.schema.alterTable('user', function (t) {
t.increments().primary(); // add
// drops previous default value from column,
// change type to string and add not nullable constraint
t.string('username', 35).notNullable().alter();
// drops both not null constraint and the default value
t.integer('age').alter();
// if alterNullable is false, drops only the default value
t.integer('age').alter({ alterNullable: false });
// if alterType is false, type of column is not altered.
t.integer('age').alter({ alterType: false });
});alter table "user" add column "id" serial primary key
SET enable_experimental_alter_column_type_general = true
alter table "user" alter column "username" drop default
alter table "user" alter column "username" drop not null
alter table "user" alter column "username" type varchar(35) using ("username"::varchar(35))
alter table "user" alter column "username" set not null
SET enable_experimental_alter_column_type_general = true
alter table "user" alter column "age" drop default
alter table "user" alter column "age" drop not null
alter table "user" alter column "age" type integer using ("age"::integer)
SET enable_experimental_alter_column_type_general = true
alter table "user" alter column "age" drop default
alter table "user" alter column "age" type integer using ("age"::integer)
SET enable_experimental_alter_column_type_general = true
alter table "user" alter column "age" drop default
alter table "user" alter column "age" drop not nullALTER TABLE [user] ADD [id] int identity(1,1) not null primary key
ALTER TABLE [user] ALTER COLUMN [username] nvarchar(35) not null
ALTER TABLE [user] ALTER COLUMN [age] int
ALTER TABLE [user] ALTER COLUMN [age] int
ALTER TABLE [user] ALTER COLUMN [age] intalter table `user` add `id` int unsigned not null auto_increment primary key
alter table `user` modify `username` varchar(35) not null, modify `age` int, modify `age` int, modify `age` intalter table "user" add "id" integer not null primary key
alter table "user" modify ("username" varchar2(35) not null, "age" integer, "age" integer, "age" integer)
DECLARE PK_NAME VARCHAR(200); BEGIN EXECUTE IMMEDIATE ('CREATE SEQUENCE "user_seq"'); SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = 'P' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = 'user'; execute immediate ('create or replace trigger "user_autoinc_trg" BEFORE INSERT on "user" for each row declare checking number := 1; begin if (:new."' || PK_NAME || '" is null) then while checking >= 1 loop select "user_seq".nextval into :new."' || PK_NAME || '" from dual; select count("' || PK_NAME || '") into checking from "user" where "' || PK_NAME || '" = :new."' || PK_NAME || '"; end loop; end if; end;'); END;alter table "user" add column "id" serial primary key
alter table "user" alter column "username" drop default
alter table "user" alter column "username" drop not null
alter table "user" alter column "username" type varchar(35) using ("username"::varchar(35))
alter table "user" alter column "username" set not null
alter table "user" alter column "age" drop default
alter table "user" alter column "age" drop not null
alter table "user" alter column "age" type integer using ("age"::integer)
alter table "user" alter column "age" drop default
alter table "user" alter column "age" type integer using ("age"::integer)
alter table "user" alter column "age" drop default
alter table "user" alter column "age" drop not nullalter table "user" add column "id" integer identity(1,1) primary key not null
alter table "user" alter column "username" drop default
alter table "user" alter column "username" drop not null
alter table "user" alter column "username" type varchar(35) using ("username"::varchar(35))
alter table "user" alter column "username" set not null
alter table "user" alter column "age" drop default
alter table "user" alter column "age" drop not null
alter table "user" alter column "age" type integer using ("age"::integer)
alter table "user" alter column "age" drop default
alter table "user" alter column "age" type integer using ("age"::integer)
alter table "user" alter column "age" drop default
alter table "user" alter column "age" drop not nullalter table `user` add column `id` integer not null primary key autoincrement
CREATE TABLE `_knex_temp_alter848` (`id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, `username` varchar(35) NOT NULL, `age` integer)
INSERT INTO "_knex_temp_alter848" SELECT * FROM "user";
DROP TABLE "user"
ALTER TABLE "_knex_temp_alter848" RENAME TO "user"index
column.index([indexName], options=({[indexType: string], [storageEngineIndexType: 'btree'|'hash'], [predicate: QueryBuilder]}))
Specifies a field as an index. If an indexName is specified, it is used in place of the standard index naming convention of tableName_columnName. In MySQL, the storage engine index type may be 'btree' or 'hash' index types, more info in Index Options section : https://dev.mysql.com/doc/refman/8.0/en/create-index.html. The indexType can be optionally specified for PostgreSQL and MySQL. No-op if this is chained off of a field that cannot be indexed. In PostgreSQL, SQLite and MSSQL a partial index can be specified by setting a 'where' predicate.
knex.schema.createTable('index_example', function (table) {
table.string('email').index('idx_email');
});create table "index_example" ("email" varchar(255))
create index "idx_email" on "index_example" ("email")CREATE TABLE [index_example] ([email] nvarchar(255))
CREATE INDEX [idx_email] ON [index_example] ([email])create table `index_example` (`email` varchar(255))
alter table `index_example` add index `idx_email`(`email`)create table "index_example" ("email" varchar2(255))
create index "idx_email" on "index_example" ("email")create table "index_example" ("email" varchar(255))
create index "idx_email" on "index_example" ("email")create table "index_example" ("email" varchar(255))create table `index_example` (`email` varchar(255))
create index `idx_email` on `index_example` (`email`)primary
column.primary(options=({[constraintName:string],[deferrable:'not deferrable'|'deferred'|'immediate']}));
Sets a primary key constraint on column. Constraint name defaults to tablename_pkey unless constraintName is specified. On Amazon Redshift, all columns included in a primary key must be not nullable. Deferrable primary constraints are supported on Postgres and Oracle and can be set by passing deferrable option to options object.
knex.schema.createTable('users_primary', function (table) {
table.integer('user_id').primary({
constraintName: 'users_primary_key',
deferrable: 'deferred',
});
});create table "users_primary" ("user_id" integer, constraint "users_primary_key" primary key ("user_id") deferrable initially deferred)CREATE TABLE [users_primary] ([user_id] int, CONSTRAINT [users_primary_key] PRIMARY KEY ([user_id]))create table `users_primary` (`user_id` int, constraint `users_primary_key` as `constraintName`, `deferred` as `deferrable` primary key (`user_id`))create table "users_primary" ("user_id" integer)
alter table "users_primary" add constraint "users_primary_key" primary key ("user_id") deferrable initially deferredcreate table "users_primary" ("user_id" integer, constraint "users_primary_key" primary key ("user_id") deferrable initially deferred)create table "users_primary" ("user_id" integer not null)
alter table "users_primary" add constraint "users_primary_key" as "constraintName", "deferred" as "deferrable" primary key ("user_id")create table `users_primary` (`user_id` integer, constraint `users_primary_key` as `constraintName`, `deferred` as `deferrable` primary key (`user_id`))INFO
If you want to create a primary key constraint on an existing column, use primary
unique
column.unique(options={[indexName:string],[deferrable:'not deferrable'|'immediate'|'deferred']})
Sets the column as unique. On Amazon Redshift, this constraint is not enforced, but it is used by the query planner. Deferrable unique constraints are supported on PostgreSQL and Oracle and can be set by passing the deferrable option in the options object.
knex.schema.table('users', function (table) {
table
.integer('user_id')
.unique({ indexName: 'user_unique_id', deferrable: 'immediate' });
});alter table "users" add column "user_id" integer
alter table "users" add constraint "user_unique_id" unique ("user_id") deferrable initially immediateALTER TABLE [users] ADD [user_id] int
CREATE UNIQUE INDEX [user_unique_id] ON [users] ([user_id]) WHERE [user_id] IS NOT NULLalter table `users` add `user_id` int
alter table `users` add unique `user_unique_id`(`user_id`)alter table "users" add "user_id" integer
alter table "users" add constraint "user_unique_id" unique ("user_id") deferrable initially immediatealter table "users" add column "user_id" integer
alter table "users" add constraint "user_unique_id" unique ("user_id") deferrable initially immediatealter table "users" add column "user_id" integer
alter table "users" add constraint "user_unique_id" unique ("user_id") deferrable initially immediatealter table `users` add column `user_id` integer
create unique index `user_unique_id` on `users` (`user_id`)INFO
If you want to create a unique constraint on an existing column, use unique
references
column.references(column)
Sets the "column" that the current column references as a foreign key. "column" can either be "tableName.columnName" syntax, or just the column name followed up with a call to inTable to specify the table.
knex.schema.createTable('references_example', function (table) {
table.integer('company_id').references('company.companyId');
});create table "references_example" ("company_id" integer)
alter table "references_example" add constraint "references_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId")CREATE TABLE [references_example] ([company_id] int, CONSTRAINT [references_example_company_id_foreign] FOREIGN KEY ([company_id]) REFERENCES [company] ([companyId]))create table `references_example` (`company_id` int)
alter table `references_example` add constraint `references_example_company_id_foreign` foreign key (`company_id`) references `company` (`companyId`)create table "references_example" ("company_id" integer)
alter table "references_example" add constraint "references_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId")create table "references_example" ("company_id" integer)
alter table "references_example" add constraint "references_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId")create table "references_example" ("company_id" integer)
alter table "references_example" add constraint "references_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId")create table `references_example` (`company_id` integer, foreign key(`company_id`) references `company`(`companyId`))inTable
column.inTable(table)
Sets the "table" where the foreign key column is located after calling column.references.
knex.schema.createTable('in_table_example', function (table) {
table.integer('company_id').references('companyId').inTable('company');
});create table "in_table_example" ("company_id" integer)
alter table "in_table_example" add constraint "in_table_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId")CREATE TABLE [in_table_example] ([company_id] int, CONSTRAINT [in_table_example_company_id_foreign] FOREIGN KEY ([company_id]) REFERENCES [company] ([companyId]))create table `in_table_example` (`company_id` int)
alter table `in_table_example` add constraint `in_table_example_company_id_foreign` foreign key (`company_id`) references `company` (`companyId`)create table "in_table_example" ("company_id" integer)
alter table "in_table_example" add constraint "in_table_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId")create table "in_table_example" ("company_id" integer)
alter table "in_table_example" add constraint "in_table_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId")create table "in_table_example" ("company_id" integer)
alter table "in_table_example" add constraint "in_table_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId")create table `in_table_example` (`company_id` integer, foreign key(`company_id`) references `company`(`companyId`))onDelete
column.onDelete(command)
Sets the SQL command to be run "onDelete".
knex.schema.createTable('on_delete_example', function (table) {
table
.integer('company_id')
.references('company.companyId')
.onDelete('CASCADE');
});create table "on_delete_example" ("company_id" integer)
alter table "on_delete_example" add constraint "on_delete_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId") on delete CASCADECREATE TABLE [on_delete_example] ([company_id] int, CONSTRAINT [on_delete_example_company_id_foreign] FOREIGN KEY ([company_id]) REFERENCES [company] ([companyId]) ON DELETE CASCADE)create table `on_delete_example` (`company_id` int)
alter table `on_delete_example` add constraint `on_delete_example_company_id_foreign` foreign key (`company_id`) references `company` (`companyId`) on delete CASCADEcreate table "on_delete_example" ("company_id" integer)
alter table "on_delete_example" add constraint "on_delete_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId") on delete CASCADEcreate table "on_delete_example" ("company_id" integer)
alter table "on_delete_example" add constraint "on_delete_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId") on delete CASCADEcreate table "on_delete_example" ("company_id" integer)
alter table "on_delete_example" add constraint "on_delete_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId") on delete CASCADEcreate table `on_delete_example` (`company_id` integer, foreign key(`company_id`) references `company`(`companyId`) on delete CASCADE)onUpdate
column.onUpdate(command)
Sets the SQL command to be run "onUpdate".
knex.schema.createTable('on_update_example', function (table) {
table
.integer('company_id')
.references('company.companyId')
.onUpdate('CASCADE');
});create table "on_update_example" ("company_id" integer)
alter table "on_update_example" add constraint "on_update_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId") on update CASCADECREATE TABLE [on_update_example] ([company_id] int, CONSTRAINT [on_update_example_company_id_foreign] FOREIGN KEY ([company_id]) REFERENCES [company] ([companyId]) ON UPDATE CASCADE)create table `on_update_example` (`company_id` int)
alter table `on_update_example` add constraint `on_update_example_company_id_foreign` foreign key (`company_id`) references `company` (`companyId`) on update CASCADEcreate table "on_update_example" ("company_id" integer)
alter table "on_update_example" add constraint "on_update_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId") on update CASCADEcreate table "on_update_example" ("company_id" integer)
alter table "on_update_example" add constraint "on_update_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId") on update CASCADEcreate table "on_update_example" ("company_id" integer)
alter table "on_update_example" add constraint "on_update_example_company_id_foreign" foreign key ("company_id") references "company" ("companyId") on update CASCADEcreate table `on_update_example` (`company_id` integer, foreign key(`company_id`) references `company`(`companyId`) on update CASCADE)defaultTo
column.defaultTo(value, options={[constraintName: string = undefined]})
Sets the default value for the column on an insert.
In MSSQL a constraintName option may be passed to ensure a specific constraint name:
knex.schema.createTable('users', function (table) {
table
.string('column')
.defaultTo('value', { constraintName: 'df_table_value' });
});create table "users" ("column" varchar(255) default 'value')CREATE TABLE [users] ([column] nvarchar(255) CONSTRAINT [df_table_value] DEFAULT 'value')create table `users` (`column` varchar(255) default 'value')create table "users" ("column" varchar2(255) default 'value')create table "users" ("column" varchar(255) default 'value')create table "users" ("column" varchar(255) default 'value')create table `users` (`column` varchar(255) default 'value')unsigned
column.unsigned()
Specifies a number as unsigned. Only for numeric values.
knex.schema.createTable('unsigned_example', function (table) {
table.integer('age').unsigned();
});create table "unsigned_example" ("age" integer)CREATE TABLE [unsigned_example] ([age] int)create table `unsigned_example` (`age` int unsigned)create table "unsigned_example" ("age" integer)create table "unsigned_example" ("age" integer)create table "unsigned_example" ("age" integer)create table `unsigned_example` (`age` integer)notNullable
column.notNullable()
Adds a not null on the current column being created.
knex.schema.createTable('not_nullable_example', function (table) {
table.string('email').notNullable();
});create table "not_nullable_example" ("email" varchar(255) not null)CREATE TABLE [not_nullable_example] ([email] nvarchar(255) not null)create table `not_nullable_example` (`email` varchar(255) not null)create table "not_nullable_example" ("email" varchar2(255) not null)create table "not_nullable_example" ("email" varchar(255) not null)create table "not_nullable_example" ("email" varchar(255) not null)create table `not_nullable_example` (`email` varchar(255) not null)nullable
column.nullable()
Default on column creation, this explicitly sets a field to be nullable.
knex.schema.createTable('nullable_example', function (table) {
table.string('nickname').nullable();
});create table "nullable_example" ("nickname" varchar(255) null)CREATE TABLE [nullable_example] ([nickname] nvarchar(255) null)create table `nullable_example` (`nickname` varchar(255) null)create table "nullable_example" ("nickname" varchar2(255) null)create table "nullable_example" ("nickname" varchar(255) null)create table "nullable_example" ("nickname" varchar(255) null)create table `nullable_example` (`nickname` varchar(255) null)first
column.first()
Sets the column to be inserted on the first position, only used in MySQL alter tables.
knex.schema.table('users', function (table) {
table.string('nickname').first();
});alter table "users" add column "nickname" varchar(255)ALTER TABLE [users] ADD [nickname] nvarchar(255)alter table `users` add `nickname` varchar(255) firstalter table "users" add "nickname" varchar2(255)alter table "users" add column "nickname" varchar(255)alter table "users" add column "nickname" varchar(255)alter table `users` add column `nickname` varchar(255)after
column.after(field)
Sets the column to be inserted after another, only used in MySQL alter tables.
knex.schema.table('users', function (table) {
table.string('nickname').after('email');
});alter table "users" add column "nickname" varchar(255)ALTER TABLE [users] ADD [nickname] nvarchar(255)alter table `users` add `nickname` varchar(255) after `email`alter table "users" add "nickname" varchar2(255)alter table "users" add column "nickname" varchar(255)alter table "users" add column "nickname" varchar(255)alter table `users` add column `nickname` varchar(255)comment
column.comment(value)
Sets the comment for a column.
knex.schema.createTable('accounts', function (t) {
t.increments().primary();
t.string('email').unique().comment('This is the email field');
});create table "accounts" ("id" serial primary key, "email" varchar(255))
comment on column "accounts"."email" is 'This is the email field'
alter table "accounts" add constraint "accounts_email_unique" unique ("email")CREATE TABLE [accounts] ([id] int identity(1,1) not null primary key, [email] nvarchar(255))
IF EXISTS(SELECT * FROM sys.fn_listextendedproperty(N'MS_Description', N'Schema', N'dbo', N'Table', N'accounts', N'Column', N'email'))
EXEC sys.sp_updateextendedproperty N'MS_Description', N'This is the email field', N'Schema', N'dbo', N'Table', N'accounts', N'Column', N'email'
ELSE
EXEC sys.sp_addextendedproperty N'MS_Description', N'This is the email field', N'Schema', N'dbo', N'Table', N'accounts', N'Column', N'email'
CREATE UNIQUE INDEX [accounts_email_unique] ON [accounts] ([email]) WHERE [email] IS NOT NULLcreate table `accounts` (`id` int unsigned not null auto_increment primary key, `email` varchar(255) comment 'This is the email field')
alter table `accounts` add unique `accounts_email_unique`(`email`)create table "accounts" ("id" integer not null primary key, "email" varchar2(255))
DECLARE PK_NAME VARCHAR(200); BEGIN EXECUTE IMMEDIATE ('CREATE SEQUENCE "accounts_seq"'); SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = 'P' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = 'accounts'; execute immediate ('create or replace trigger "accounts_autoinc_trg" BEFORE INSERT on "accounts" for each row declare checking number := 1; begin if (:new."' || PK_NAME || '" is null) then while checking >= 1 loop select "accounts_seq".nextval into :new."' || PK_NAME || '" from dual; select count("' || PK_NAME || '") into checking from "accounts" where "' || PK_NAME || '" = :new."' || PK_NAME || '"; end loop; end if; end;'); END;
comment on column "accounts"."email" is 'This is the email field'
alter table "accounts" add constraint "accounts_email_unique" unique ("email")create table "accounts" ("id" serial primary key, "email" varchar(255))
comment on column "accounts"."email" is 'This is the email field'
alter table "accounts" add constraint "accounts_email_unique" unique ("email")create table "accounts" ("id" integer identity(1,1) primary key not null, "email" varchar(255))
comment on column "accounts"."email" is 'This is the email field'
alter table "accounts" add constraint "accounts_email_unique" unique ("email")create table `accounts` (`id` integer not null primary key autoincrement, `email` varchar(255))
create unique index `accounts_email_unique` on `accounts` (`email`)collate
column.collate(collation)
Sets the collation for a column (only works in MySQL). Here is a list of all available collations: https://dev.mysql.com/doc/refman/5.5/en/charset-charsets.html
knex.schema.createTable('users', function (t) {
t.increments();
t.string('email').unique().collate('utf8_unicode_ci');
});create table "users" ("id" serial primary key, "email" varchar(255))
alter table "users" add constraint "users_email_unique" unique ("email")CREATE TABLE [users] ([id] int identity(1,1) not null primary key, [email] nvarchar(255))
CREATE UNIQUE INDEX [users_email_unique] ON [users] ([email]) WHERE [email] IS NOT NULLcreate table `users` (`id` int unsigned not null auto_increment primary key, `email` varchar(255) collate 'utf8_unicode_ci')
alter table `users` add unique `users_email_unique`(`email`)create table "users" ("id" integer not null primary key, "email" varchar2(255))
DECLARE PK_NAME VARCHAR(200); BEGIN EXECUTE IMMEDIATE ('CREATE SEQUENCE "users_seq"'); SELECT cols.column_name INTO PK_NAME FROM all_constraints cons, all_cons_columns cols WHERE cons.constraint_type = 'P' AND cons.constraint_name = cols.constraint_name AND cons.owner = cols.owner AND cols.table_name = 'users'; execute immediate ('create or replace trigger "users_autoinc_trg" BEFORE INSERT on "users" for each row declare checking number := 1; begin if (:new."' || PK_NAME || '" is null) then while checking >= 1 loop select "users_seq".nextval into :new."' || PK_NAME || '" from dual; select count("' || PK_NAME || '") into checking from "users" where "' || PK_NAME || '" = :new."' || PK_NAME || '"; end loop; end if; end;'); END;
alter table "users" add constraint "users_email_unique" unique ("email")create table "users" ("id" serial primary key, "email" varchar(255))
alter table "users" add constraint "users_email_unique" unique ("email")create table "users" ("id" integer identity(1,1) primary key not null, "email" varchar(255))
alter table "users" add constraint "users_email_unique" unique ("email")create table `users` (`id` integer not null primary key autoincrement, `email` varchar(255))
create unique index `users_email_unique` on `users` (`email`)View
columns
view.columns([columnNames])
Specify the columns of the view.
knex.schema.createView('users_view', function (view) {
view.columns(['first_name', 'last_name']);
view.as(knex('users').select('first_name').where('age', '>', '18'));
});create view "users_view" ("first_name", "last_name") as select "first_name" from "users" where "age" > '18'CREATE VIEW [users_view] ([first_name], [last_name]) AS select [first_name] from [users] where [age] > '18'create view `users_view` (`first_name`, `last_name`) as select `first_name` from `users` where `age` > '18'create view "users_view" ("first_name", "last_name") as select "first_name" from "users" where "age" > '18'create view "users_view" ("first_name", "last_name") as select "first_name" from "users" where "age" > '18'create view "users_view" ("first_name", "last_name") as select "first_name" from "users" where "age" > '18'create view `users_view` (`first_name`, `last_name`) as select `first_name` from `users` where `age` > '18'as
view.as(selectQuery)
Specify the select query of the view.
knex.schema.createView('users_view', function (view) {
view.as(knex('users').select('first_name'));
});create view "users_view" as select "first_name" from "users"CREATE VIEW [users_view] AS select [first_name] from [users]create view `users_view` as select `first_name` from `users`create view "users_view" as select "first_name" from "users"create view "users_view" as select "first_name" from "users"create view "users_view" as select "first_name" from "users"create view `users_view` as select `first_name` from `users`checkOption SQMSCR
view.checkOption()
Add check option on the view definition. On OracleDb, MySQL, PostgreSQL and Redshift.
knex.schema.createView('users_view', function (view) {
view.checkOption();
view.as(knex('users').select('first_name'));
});create view "users_view" as select "first_name" from "users" with check optionError: check option definition is not supported by this dialect.create view `users_view` as select `first_name` from `users` with check optioncreate view "users_view" as select "first_name" from "users" with check optioncreate view "users_view" as select "first_name" from "users" with check optioncreate view "users_view" as select "first_name" from "users" with check optionError: check option definition is not supported by this dialect.localCheckOption SQMSORCR
view.localCheckOption()
Add local check option on the view definition. On MySQL, PostgreSQL and Redshift.
knex.schema.createView('users_view', function (view) {
view.localCheckOption();
view.as(knex('users').select('first_name'));
});create view "users_view" as select "first_name" from "users" with local check optionError: check option definition is not supported by this dialect.create view `users_view` as select `first_name` from `users` with local check optionError: check option definition is not supported by this dialect.create view "users_view" as select "first_name" from "users" with local check optioncreate view "users_view" as select "first_name" from "users" with local check optionError: check option definition is not supported by this dialect.cascadedCheckOption SQMSORCR
view.cascadedCheckOption()
Add cascaded check option on the view definition. On MySQL, PostgreSQL and Redshift.
knex.schema.createView('users_view', function (view) {
view.cascadedCheckOption();
view.as(knex('users').select('first_name'));
});create view "users_view" as select "first_name" from "users" with cascaded check optionError: check option definition is not supported by this dialect.create view `users_view` as select `first_name` from `users` with cascaded check optionError: check option definition is not supported by this dialect.create view "users_view" as select "first_name" from "users" with cascaded check optioncreate view "users_view" as select "first_name" from "users" with cascaded check optionError: check option definition is not supported by this dialect.Checks
check
table.check(checkPredicate, [bindings], [constraintName])
Specify a check on table or column with raw predicate.
knex.schema.createTable('product', function (table) {
table.integer('price_min');
table.integer('price');
table.check('?? >= ??', ['price', 'price_min']);
});create table "product" ("price_min" integer, "price" integer, check ("price" >= "price_min"))CREATE TABLE [product] ([price_min] int, [price] int, check ([price] >= [price_min]))create table `product` (`price_min` int, `price` int, check (`price` >= `price_min`))create table "product" ("price_min" integer, "price" integer, check ("price" >= "price_min"))create table "product" ("price_min" integer, "price" integer, check ("price" >= "price_min"))create table "product" ("price_min" integer, "price" integer, check ("price" >= "price_min"))create table `product` (`price_min` integer, `price` integer, check (`price` >= `price_min`))checkPositive
column.checkPositive([constraintName])
Specify a check on the column that tests if the value of the column is positive.
knex.schema.createTable('product', function (table) {
table.integer('price').checkPositive();
});create table "product" ("price" integer check ("price" > 0))CREATE TABLE [product] ([price] int check ([price] > 0))create table `product` (`price` int check (`price` > 0))create table "product" ("price" integer check ("price" > 0))create table "product" ("price" integer check ("price" > 0))create table "product" ("price" integer check ("price" > 0))create table `product` (`price` integer check (`price` > 0))checkNegative
column.checkNegative([constraintName])
Specify a check on the column that tests if the value of the column is negative.
knex.schema.createTable('product', function (table) {
table.integer('price_decrease').checkNegative();
});create table "product" ("price_decrease" integer check ("price_decrease" < 0))CREATE TABLE [product] ([price_decrease] int check ([price_decrease] < 0))create table `product` (`price_decrease` int check (`price_decrease` < 0))create table "product" ("price_decrease" integer check ("price_decrease" < 0))create table "product" ("price_decrease" integer check ("price_decrease" < 0))create table "product" ("price_decrease" integer check ("price_decrease" < 0))create table `product` (`price_decrease` integer check (`price_decrease` < 0))checkIn
column.checkIn(values, [constraintName])
Specify a check on the column that tests if the value of the column is contained in a set of specified values.
knex.schema.createTable('product', function (table) {
table.string('type').checkIn(['table', 'chair', 'sofa']);
});create table "product" ("type" varchar(255) check ("type" in ('table','chair','sofa')))CREATE TABLE [product] ([type] nvarchar(255) check ([type] in ('table','chair','sofa')))create table `product` (`type` varchar(255) check (`type` in ('table','chair','sofa')))create table "product" ("type" varchar2(255) check ("type" in ('table', 'chair', 'sofa')))create table "product" ("type" varchar(255) check ("type" in ('table','chair','sofa')))create table "product" ("type" varchar(255) check ("type" in ('table','chair','sofa')))create table `product` (`type` varchar(255) check (`type` in ('table','chair','sofa')))checkNotIn
column.checkNotIn(values, [constraintName])
Specify a check on the column that tests if the value of the column is not contained in a set of specified values.
knex.schema.createTable('product', function (table) {
table.string('type').checkNotIn(['boot', 'shoe']);
});create table "product" ("type" varchar(255) check ("type" not in ('boot','shoe')))CREATE TABLE [product] ([type] nvarchar(255) check ([type] not in ('boot','shoe')))create table `product` (`type` varchar(255) check (`type` not in ('boot','shoe')))create table "product" ("type" varchar2(255) check ("type" not in ('boot','shoe')))create table "product" ("type" varchar(255) check ("type" not in ('boot','shoe')))create table "product" ("type" varchar(255) check ("type" not in ('boot','shoe')))create table `product` (`type` varchar(255) check (`type` not in ('boot','shoe')))checkBetween
column.checkBetween(values, [constraintName])
Specify a check on the column that tests if the value of the column is within a range of values.
knex.schema.createTable('product', function (table) {
table.integer('price').checkBetween([0, 100]);
});
// You can add checks on multiple intervals
knex.schema.createTable('product', function (table) {
table.integer('price').checkBetween([
[0, 20],
[30, 40],
]);
});create table "product" ("price" integer check ("price" between 0 and 100))
-- ----
create table "product" ("price" integer check ("price" between 0 and 20 or "price" between 30 and 40))CREATE TABLE [product] ([price] int check ([price] between 0 and 100))
-- ----
CREATE TABLE [product] ([price] int check ([price] between 0 and 20 or [price] between 30 and 40))create table `product` (`price` int check (`price` between 0 and 100))
-- ----
create table `product` (`price` int check (`price` between 0 and 20 or `price` between 30 and 40))create table "product" ("price" integer check ("price" between 0 and 100))
-- ----
create table "product" ("price" integer check ("price" between 0 and 20 or "price" between 30 and 40))create table "product" ("price" integer check ("price" between 0 and 100))
-- ----
create table "product" ("price" integer check ("price" between 0 and 20 or "price" between 30 and 40))create table "product" ("price" integer check ("price" between 0 and 100))
-- ----
create table "product" ("price" integer check ("price" between 0 and 20 or "price" between 30 and 40))create table `product` (`price` integer check (`price` between 0 and 100))
-- ----
create table `product` (`price` integer check (`price` between 0 and 20 or `price` between 30 and 40))checkLength
column.checkLength(operator, length, [constraintName])
Specify a check on the column that tests if the length of a string matches the predicate.
knex.schema.createTable('product', function (table) {
// operator can be =, !=, <=, >=, <, >
table.string('phone').checkLength('=', 8);
});create table "product" ("phone" varchar(255) check (length("phone") = 8))CREATE TABLE [product] ([phone] nvarchar(255) check (LEN([phone]) = 8))create table `product` (`phone` varchar(255) check (length(`phone`) = 8))create table "product" ("phone" varchar2(255) check (length("phone") = 8))create table "product" ("phone" varchar(255) check (length("phone") = 8))create table "product" ("phone" varchar(255) check (length("phone") = 8))create table `product` (`phone` varchar(255) check (length(`phone`) = 8))checkRegex
column.checkRegex(regex, [constraintName])
Specify a check on the column that tests if the value matches the specified regular expression. In MSSQL only simple pattern matching is supported, not full regex syntax.
knex.schema.createTable('product', function (table) {
table.string('phone').checkRegex('[0-9]{8}');
// In MSSQL, {8} syntax don't work,
// you need to duplicate [0-9].
table.string('phone').checkRegex('[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]');
});create table "product" ("phone" varchar(255) check ("phone" ~ '[0-9]{8}'), "phone" varchar(255) check ("phone" ~ '[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]'))CREATE TABLE [product] ([phone] nvarchar(255) check ([phone] LIKE '%[0-9]{8}%'), [phone] nvarchar(255) check ([phone] LIKE '%[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]%'))create table `product` (`phone` varchar(255) check (`phone` REGEXP '[0-9]{8}'), `phone` varchar(255) check (`phone` REGEXP '[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]'))create table "product" ("phone" varchar2(255) check (REGEXP_LIKE("phone",'[0-9]{8}')), "phone" varchar2(255) check (REGEXP_LIKE("phone",'[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]')))create table "product" ("phone" varchar(255) check ("phone" ~ '[0-9]{8}'), "phone" varchar(255) check ("phone" ~ '[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]'))create table "product" ("phone" varchar(255) check ("phone" ~ '[0-9]{8}'), "phone" varchar(255) check ("phone" ~ '[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]'))create table `product` (`phone` varchar(255) check (`phone` REGEXP '[0-9]{8}'), `phone` varchar(255) check (`phone` REGEXP '[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]'))dropChecks
table.dropChecks([checkConstraintNames])
Drop checks constraint given an array of constraint names.
knex.schema.alterTable('product', function (table) {
table.dropChecks(['price_check', 'price_proportion_check']);
});alter table "product" drop constraint price_check, drop constraint price_proportion_checkalter table [product] drop constraint price_check, drop constraint price_proportion_checkalter table `product` drop constraint price_check, drop constraint price_proportion_checkalter table "product" drop constraint price_check, drop constraint price_proportion_checkalter table "product" drop constraint price_check, drop constraint price_proportion_checkalter table "product" drop constraint price_check, drop constraint price_proportion_checkalter table `product` drop constraint price_check, drop constraint price_proportion_check