diff --git a/.docker/clickhouse/cluster/server1_config.xml b/.docker/clickhouse/cluster/server1_config.xml
new file mode 100644
index 00000000..ecebb8c3
--- /dev/null
+++ b/.docker/clickhouse/cluster/server1_config.xml
@@ -0,0 +1,117 @@
+
+
+
+ 8123
+ 9009
+ clickhouse1
+
+ users.xml
+ default
+ default
+
+ 5368709120
+
+ /var/lib/clickhouse/
+ /var/lib/clickhouse/tmp/
+ /var/lib/clickhouse/user_files/
+ /var/lib/clickhouse/access/
+ 3
+
+
+ debug
+ /var/log/clickhouse-server/clickhouse-server.log
+ /var/log/clickhouse-server/clickhouse-server.err.log
+ 1000M
+ 10
+ 1
+
+
+
+
+
+
+ clickhouse1
+ 9000
+
+
+ clickhouse2
+ 9000
+
+
+
+
+
+
+ 9181
+ 1
+ /var/lib/clickhouse/coordination/log
+ /var/lib/clickhouse/coordination/snapshots
+
+
+ 10000
+ 30000
+ trace
+ 10000
+
+
+
+
+ 1
+ clickhouse1
+ 9000
+
+
+ 2
+ clickhouse2
+ 9000
+
+
+
+
+
+
+ clickhouse1
+ 9181
+
+
+ clickhouse2
+ 9181
+
+
+
+
+ test_cluster
+ clickhouse1
+ 1
+
+
+
+ /clickhouse/test_cluster/task_queue/ddl
+
+
+
+ system
+
+ toYYYYMM(event_date)
+ 1000
+
+
+
+
+ Access-Control-Allow-Origin
+ *
+
+
+ Access-Control-Allow-Headers
+ accept, origin, x-requested-with, content-type, authorization
+
+
+ Access-Control-Allow-Methods
+ POST, GET, OPTIONS
+
+
+ Access-Control-Max-Age
+ 86400
+
+
+
diff --git a/.docker/clickhouse/cluster/server2_config.xml b/.docker/clickhouse/cluster/server2_config.xml
new file mode 100644
index 00000000..83d7bbb1
--- /dev/null
+++ b/.docker/clickhouse/cluster/server2_config.xml
@@ -0,0 +1,117 @@
+
+
+
+ 8123
+ 9009
+ clickhouse2
+
+ users.xml
+ default
+ default
+
+ 5368709120
+
+ /var/lib/clickhouse/
+ /var/lib/clickhouse/tmp/
+ /var/lib/clickhouse/user_files/
+ /var/lib/clickhouse/access/
+ 3
+
+
+ debug
+ /var/log/clickhouse-server/clickhouse-server.log
+ /var/log/clickhouse-server/clickhouse-server.err.log
+ 1000M
+ 10
+ 1
+
+
+
+
+
+
+ clickhouse1
+ 9000
+
+
+ clickhouse2
+ 9000
+
+
+
+
+
+
+ 9181
+ 2
+ /var/lib/clickhouse/coordination/log
+ /var/lib/clickhouse/coordination/snapshots
+
+
+ 10000
+ 30000
+ trace
+ 10000
+
+
+
+
+ 1
+ clickhouse1
+ 9000
+
+
+ 2
+ clickhouse2
+ 9000
+
+
+
+
+
+
+ clickhouse1
+ 9181
+
+
+ clickhouse2
+ 9181
+
+
+
+
+ test_cluster
+ clickhouse2
+ 1
+
+
+
+ /clickhouse/test_cluster/task_queue/ddl
+
+
+
+ system
+
+ toYYYYMM(event_date)
+ 1000
+
+
+
+
+ Access-Control-Allow-Origin
+ *
+
+
+ Access-Control-Allow-Headers
+ accept, origin, x-requested-with, content-type, authorization
+
+
+ Access-Control-Allow-Methods
+ POST, GET, OPTIONS
+
+
+ Access-Control-Max-Age
+ 86400
+
+
+
diff --git a/.docker/clickhouse/single/config.xml b/.docker/clickhouse/single/config.xml
new file mode 100644
index 00000000..218229cd
--- /dev/null
+++ b/.docker/clickhouse/single/config.xml
@@ -0,0 +1,54 @@
+
+
+
+ 8123
+ 9000
+
+ users.xml
+ default
+ default
+
+ 5368709120
+
+ /var/lib/clickhouse/
+ /var/lib/clickhouse/tmp/
+ /var/lib/clickhouse/user_files/
+ /var/lib/clickhouse/access/
+ 3
+
+
+ debug
+ /var/log/clickhouse-server/clickhouse-server.log
+ /var/log/clickhouse-server/clickhouse-server.err.log
+ 1000M
+ 10
+ 1
+
+
+
+ system
+
+ toYYYYMM(event_date)
+ 1000
+
+
+
+
+ Access-Control-Allow-Origin
+ *
+
+
+ Access-Control-Allow-Headers
+ accept, origin, x-requested-with, content-type, authorization
+
+
+ Access-Control-Allow-Methods
+ POST, GET, OPTIONS
+
+
+ Access-Control-Max-Age
+ 86400
+
+
+
+
diff --git a/.docker/clickhouse/users.xml b/.docker/clickhouse/users.xml
new file mode 100644
index 00000000..61188536
--- /dev/null
+++ b/.docker/clickhouse/users.xml
@@ -0,0 +1,34 @@
+
+
+
+
+
+ random
+
+
+
+
+
+
+
+ ::/0
+
+ default
+ default
+ 1
+
+
+
+
+
+
+ 3600
+ 0
+ 0
+ 0
+ 0
+ 0
+
+
+
+
diff --git a/.docker/docker-compose.cluster.yml b/.docker/docker-compose.cluster.yml
new file mode 100644
index 00000000..d3ce9996
--- /dev/null
+++ b/.docker/docker-compose.cluster.yml
@@ -0,0 +1,52 @@
+version: '3.5'
+
+services:
+ clickhouse1:
+ image: 'clickhouse/clickhouse-server:${CLICKHOUSE_VERSION-23.11-alpine}'
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ hostname: clickhouse1
+ container_name: clickhouse-activerecord-clickhouse-server-1
+ ports:
+ - '8124:8123'
+ - '9001:9000'
+ volumes:
+ - './clickhouse/cluster/server1_config.xml:/etc/clickhouse-server/config.xml'
+ - './clickhouse/users.xml:/etc/clickhouse-server/users.xml'
+ healthcheck:
+ test: bash -c "exec 6<> /dev/tcp/localhost/8123"
+ interval: 5s
+
+ clickhouse2:
+ image: 'clickhouse/clickhouse-server:${CLICKHOUSE_VERSION-23.11-alpine}'
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ hostname: clickhouse2
+ container_name: clickhouse-activerecord-clickhouse-server-2
+ ports:
+ - '8125:8123'
+ volumes:
+ - './clickhouse/cluster/server2_config.xml:/etc/clickhouse-server/config.xml'
+ - './clickhouse/users.xml:/etc/clickhouse-server/users.xml'
+ healthcheck:
+ test: bash -c "exec 6<> /dev/tcp/localhost/8123"
+ interval: 5s
+
+ # Using Nginx as a cluster entrypoint and a round-robin load balancer for HTTP requests
+ nginx:
+ image: 'nginx:1.23.1-alpine'
+ hostname: nginx
+ ports:
+ - '28123:8123'
+ volumes:
+ - './nginx/local.conf:/etc/nginx/conf.d/local.conf'
+ container_name: clickhouse-activerecord-nginx
+ depends_on:
+ clickhouse1:
+ condition: service_healthy
+ clickhouse2:
+ condition: service_healthy
diff --git a/.docker/docker-compose.yml b/.docker/docker-compose.yml
new file mode 100644
index 00000000..5e3ce482
--- /dev/null
+++ b/.docker/docker-compose.yml
@@ -0,0 +1,17 @@
+version: '3.8'
+services:
+ clickhouse:
+ image: 'clickhouse/clickhouse-server:${CLICKHOUSE_VERSION-23.11-alpine}'
+ container_name: 'clickhouse-activerecord-clickhouse-server'
+ ports:
+ - '18123:8123'
+ ulimits:
+ nofile:
+ soft: 262144
+ hard: 262144
+ volumes:
+ - './clickhouse/single/config.xml:/etc/clickhouse-server/config.xml'
+ - './clickhouse/users.xml:/etc/clickhouse-server/users.xml'
+ healthcheck:
+ test: bash -c "exec 6<> /dev/tcp/localhost/8123"
+ interval: 5s
diff --git a/.docker/nginx/local.conf b/.docker/nginx/local.conf
new file mode 100644
index 00000000..35fd4512
--- /dev/null
+++ b/.docker/nginx/local.conf
@@ -0,0 +1,12 @@
+upstream clickhouse_cluster {
+ server clickhouse1:8123;
+ server clickhouse2:8123;
+}
+
+server {
+ listen 8123;
+ client_max_body_size 100M;
+ location / {
+ proxy_pass http://clickhouse_cluster;
+ }
+}
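
The nginx config above is the cluster's single HTTP entrypoint: it round-robins requests across both ClickHouse nodes, and `docker-compose.cluster.yml` publishes it on host port 28123. A minimal sketch of checking that entrypoint from Ruby (the `/ping` endpoint is ClickHouse's standard health check; the host and port come from the compose file above):

```ruby
require 'net/http'

# Hit the nginx load balancer published on host port 28123 by docker-compose.cluster.yml.
# A healthy ClickHouse node behind the balancer answers "Ok." on /ping.
response = Net::HTTP.get_response(URI('http://localhost:28123/ping'))
puts response.code # => "200"
puts response.body # => "Ok.\n"
```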
diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml
new file mode 100644
index 00000000..e41ca9bc
--- /dev/null
+++ b/.github/workflows/testing.yml
@@ -0,0 +1,97 @@
+name: Testing
+
+on:
+ push:
+ branches: [ "master" ]
+ pull_request:
+ branches: [ "master" ]
+
+jobs:
+ tests_single:
+ name: Testing single server
+ runs-on: ubuntu-latest
+
+ env:
+ CLICKHOUSE_PORT: 18123
+ CLICKHOUSE_DATABASE: default
+
+ strategy:
+ fail-fast: true
+ max-parallel: 1
+ matrix:
+ version:
+ - ruby: 2.7
+ rails: 7.1.3
+ - ruby: 3.0
+ rails: 7.1.3
+ - ruby: 3.2
+ rails: 7.1.3
+ - ruby: 3.2
+ rails: 7.2.1
+ clickhouse: [ '22.1', '24.6' ]
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Start ClickHouse ${{ matrix.clickhouse }}
+ uses: isbang/compose-action@v1.5.1
+ env:
+ CLICKHOUSE_VERSION: ${{ matrix.clickhouse }}
+ with:
+ compose-file: '.docker/docker-compose.yml'
+ down-flags: '--volumes'
+
+ - run: echo 'gem "activerecord", "~> ${{ matrix.version.rails }}"' >> Gemfile
+
+ - name: Set up Ruby ${{ matrix.version.ruby }}
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: ${{ matrix.version.ruby }}
+ bundler-cache: true
+
+ - run: bundle exec rspec spec/single --format progress
+
+ tests_cluster:
+ name: Testing cluster server
+ runs-on: ubuntu-latest
+
+ env:
+ CLICKHOUSE_PORT: 28123
+ CLICKHOUSE_DATABASE: default
+ CLICKHOUSE_CLUSTER: test_cluster
+
+ strategy:
+ fail-fast: true
+ max-parallel: 1
+ matrix:
+ version:
+ - ruby: 2.7
+ rails: 7.1.3
+ - ruby: 3.0
+ rails: 7.1.3
+ - ruby: 3.2
+ rails: 7.1.3
+ - ruby: 3.2
+ rails: 7.2.0
+ clickhouse: [ '22.1', '24.6' ]
+
+ steps:
+ - uses: actions/checkout@v4
+
+ - name: Start ClickHouse Cluster ${{ matrix.clickhouse }}
+ uses: isbang/compose-action@v1.5.1
+ env:
+ CLICKHOUSE_VERSION: ${{ matrix.clickhouse }}
+ with:
+ compose-file: '.docker/docker-compose.cluster.yml'
+ down-flags: '--volumes'
+
+ - run: echo 'gem "activerecord", "~> ${{ matrix.version.rails }}"' >> Gemfile
+
+ - name: Set up Ruby ${{ matrix.version.ruby }}
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: ${{ matrix.version.ruby }}
+ bundler-cache: true
+
+ - run: bundle exec rspec spec/cluster --format progress
diff --git a/.gitignore b/.gitignore
index 42b720a5..63ff4ca8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -54,3 +54,4 @@ crashlytics.properties
crashlytics-build.properties
fabric.properties
.rspec_status
+.tool-versions
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3dbfb857..07c1d293 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,52 @@
+### Version 1.1.2 (Aug 27, 2024)
+* 🎉 Support for Rails 7.2 #156
+* Add `views` method for getting the list of `View` tables in #152
+* Add support for Map datatype in #144
+* Add support for named window functions
+* Fix schema dumper default values for numbers
+* Normalize table name in schema dump in #148
+* Noop savepoint functionality in #150
+* Fix `#find_by` in #153
+* Add RSpec configuration
+* Fix model primary key detection
+
+### Version 1.0.7 (Apr 27, 2024)
+
+* Support table indexes
+* Fix non-canonical UUID by [@PauloMiranda98](https://github.com/PauloMiranda98) in (#117)
+* Fix precision loss due to JSON float parsing by [@jenskdsgn](https://github.com/jenskdsgn) in (#129)
+* Support functions by [@felix-dumit](https://github.com/felix-dumit) in (#120)
+* Hotfix/rails71 change column by [@trumenov](https://github.com/trumenov) in (#132)
+* Fix DB tasks
+
+### Version 1.0.5 (Mar 14, 2024)
+
+* GitHub workflows
+* Fix injection internal and schema classes for rails 7
+* Add support for binary string by [@PauloMiranda98](https://github.com/PauloMiranda98) in (#116)
+
+### Version 1.0.4 (Feb 2, 2024)
+
+* Use ILIKE for `model.arel_table[:column]#matches` by [@stympy](https://github.com/stympy) in (#115)
+* Fixed `insert_all` for array column (#71)
+* Register Bool and UUID in type map by [@lukinski](https://github.com/lukinski) in (#110)
+* Refactoring `final` method
+* Support update & delete for clickhouse from version 23.3 and newer (#93)
+
+### Version 1.0.0 (Nov 29, 2023)
+
+ * Full support Rails 7.1+
+ * Full support primary or multiple databases
+
+### Version 0.6.0 (Oct 19, 2023)
+
+ * Added `Bool` column type instead of `UInt8` (#78). Supports ClickHouse 22+ only
+ * Added `final` method (#81) (The `ar_internal_metadata` table needs to be deleted after a gem update)
+ * Added `settings` method (#82)
+ * Fixed aggregation type conversion (#92)
+ * Fixed the error raised when the database does not exist (#91)
+ * Fixed internal metadata update (#84)
+
### Version 0.5.10 (Jun 22, 2022)
* Fixes to create_table method (#70)
diff --git a/README.md b/README.md
index c6fb3645..d10a531f 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
# Clickhouse::Activerecord
-A Ruby database ActiveRecord driver for ClickHouse. Support Rails >= 5.2.
-Support ClickHouse version from 20.9 LTS.
+A Ruby database ActiveRecord driver for ClickHouse. Supports Rails >= 7.1.
+Supports ClickHouse 22.0 LTS and newer.
## Installation
@@ -50,41 +50,31 @@ class ActionView < ActiveRecord::Base
end
```
-## Usage in Rails 5
+## Usage in Rails
Add your `database.yml` connection information for your environment:
```yml
-development_clickhouse:
+development:
adapter: clickhouse
database: database
```
-Add to your model:
+An example model:
```ruby
class Action < ActiveRecord::Base
- establish_connection "#{Rails.env}_clickhouse".to_sym
end
```
For materialized view model add:
```ruby
class ActionView < ActiveRecord::Base
- establish_connection "#{Rails.env}_clickhouse".to_sym
self.is_view = true
end
```
-Or global connection:
-
-```yml
-development:
- adapter: clickhouse
- database: database
-```
-
-## Usage in Rails 6 with second database
+## Usage in Rails with second database
Add your `database.yml` connection information for your environment:
@@ -102,31 +92,31 @@ Connection [Multiple Databases with Active Record](https://guides.rubyonrails.or
```ruby
class Action < ActiveRecord::Base
- connects_to database: { writing: :clickhouse, reading: :clickhouse }
+ establish_connection :clickhouse
end
```
### Rake tasks
-**Note!** For Rails 6 you can use default rake tasks if you configure `migrations_paths` in your `database.yml`, for example: `rake db:migrate`
-
Create / drop / purge / reset database:
- $ rake clickhouse:create
- $ rake clickhouse:drop
- $ rake clickhouse:purge
- $ rake clickhouse:reset
+ $ rake db:create
+ $ rake db:drop
+ $ rake db:purge
+ $ rake db:reset
-Prepare system tables for rails:
+Or with multiple databases:
- $ rake clickhouse:prepare_schema_migration_table
- $ rake clickhouse:prepare_internal_metadata_table
+ $ rake db:create:clickhouse
+ $ rake db:drop:clickhouse
+ $ rake db:purge:clickhouse
+ $ rake db:reset:clickhouse
Migration:
$ rails g clickhouse_migration MIGRATION_NAME COLUMNS
- $ rake clickhouse:migrate
- $ rake clickhouse:rollback
+ $ rake db:migrate
+ $ rake db:rollback
### Dump / Load for multiple using databases
@@ -134,11 +124,11 @@ If you using multiple databases, for example: PostgreSQL, Clickhouse.
Schema dump to `db/clickhouse_schema.rb` file:
- $ rake clickhouse:schema:dump
+ $ rake db:schema:dump:clickhouse
Schema load from `db/clickhouse_schema.rb` file:
- $ rake clickhouse:schema:load
+ $ rake db:schema:load:clickhouse
To export the schema to PostgreSQL, use:
@@ -159,13 +149,21 @@ Structure load from `db/clickhouse_structure.sql` file:
$ rake db:schema:dump
$ rake db:schema:load
$ rake db:structure:dump
- $ rake db:structure:load
+ $ rake db:structure:load
+
+### RSpec
+
+To automatically truncate tables before each test, add the following to your `spec/rails_helper.rb`:
+
+```ruby
+require 'clickhouse-activerecord/rspec'
+```
### Insert and select data
```ruby
Action.where(url: 'http://example.com', date: Date.current).where.not(name: nil).order(created_at: :desc).limit(10)
-# Clickhouse Action Load (10.3ms) SELECT actions.* FROM actions WHERE actions.date = '2017-11-29' AND actions.url = 'http://example.com' AND (actions.name IS NOT NULL) ORDER BY actions.created_at DESC LIMIT 10
+# Clickhouse Action Load (10.3ms) SELECT actions.* FROM actions WHERE actions.date = '2017-11-29' AND actions.url = 'http://example.com' AND (actions.name IS NOT NULL) ORDER BY actions.created_at DESC LIMIT 10
#=> #<ActiveRecord::Relation [#<Action ...>]>
Action.create(url: 'http://example.com', date: Date.yesterday)
@@ -175,6 +173,18 @@ Action.create(url: 'http://example.com', date: Date.yesterday)
ActionView.maximum(:date)
# Clickhouse (10.3ms) SELECT maxMerge(actions.date) FROM actions
#=> 'Wed, 29 Nov 2017'
+
+Action.where(date: Date.current).final.limit(10)
+# Clickhouse Action Load (10.3ms) SELECT actions.* FROM actions FINAL WHERE actions.date = '2017-11-29' LIMIT 10
+#=> #<ActiveRecord::Relation [#<Action ...>]>
+
+Action.settings(optimize_read_in_order: 1).where(date: Date.current).limit(10)
+# Clickhouse Action Load (10.3ms) SELECT actions.* FROM actions WHERE actions.date = '2017-11-29' LIMIT 10 SETTINGS optimize_read_in_order = 1
+#=> #<ActiveRecord::Relation [#<Action ...>]>
+
+User.joins(:actions).using(:group_id)
+# Clickhouse User Load (10.3ms) SELECT users.* FROM users INNER JOIN actions USING group_id
+#=> #<ActiveRecord::Relation [#<User ...>]>
```
@@ -183,25 +193,26 @@ ActionView.maximum(:date)
Integer types are unsigned by default. Specify signed values with `:unsigned =>
false`. The default integer is `UInt32`
-| Type (bit size) | Range | :limit (byte size) |
-| :--- | :----: | ---: |
-| Int8 | -128 to 127 | 1 |
-| Int16 | -32768 to 32767 | 2 |
-| Int32 | -2147483648 to 2,147,483,647 | 3,4 |
-| Int64 | -9223372036854775808 to 9223372036854775807] | 5,6,7,8 |
-| Int128 | ... | 9 - 15 |
-| Int256 | ... | 16+ |
-| UInt8 | 0 to 255 | 1 |
-| UInt16 | 0 to 65,535 | 2 |
-| UInt32 | 0 to 4,294,967,295 | 3,4 |
-| UInt64 | 0 to 18446744073709551615 | 5,6,7,8 |
-| UInt256 | 0 to ... | 8+ |
-| Array | ... | ... |
+| Type (bit size) | Range | :limit (byte size) |
+|:----------------|:--------------------------------------------:|-------------------:|
+| Int8 | -128 to 127 | 1 |
+| Int16 | -32768 to 32767 | 2 |
+| Int32 | -2147483648 to 2,147,483,647 | 3,4 |
+| Int64 | -9223372036854775808 to 9223372036854775807 | 5,6,7,8 |
+| Int128 | ... | 9 - 15 |
+| Int256 | ... | 16+ |
+| UInt8 | 0 to 255 | 1 |
+| UInt16 | 0 to 65,535 | 2 |
+| UInt32 | 0 to 4,294,967,295 | 3,4 |
+| UInt64 | 0 to 18446744073709551615 | 5,6,7,8 |
+| UInt256 | 0 to ... | 8+ |
+| Array | ... | ... |
+| Map | ... | ... |
Example:
-``` ruby
-class CreateDataItems < ActiveRecord::Migration
+```ruby
+class CreateDataItems < ActiveRecord::Migration[7.1]
def change
create_table "data_items", id: false, options: "VersionedCollapsingMergeTree(sign, version) PARTITION BY toYYYYMM(day) ORDER BY category", force: :cascade do |t|
t.date "day", null: false
@@ -210,9 +221,43 @@ class CreateDataItems < ActiveRecord::Migration
t.integer "sign", limit: 1, unsigned: false, default: -> { "CAST(1, 'Int8')" }, null: false
t.integer "version", limit: 8, default: -> { "CAST(toUnixTimestamp(now()), 'UInt64')" }, null: false
end
+
+ create_table "with_index", id: false, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (date)' do |t|
+ t.integer :int1, null: false
+ t.integer :int2, null: false
+ t.date :date, null: false
+
+ t.index '(int1 * int2, date)', name: 'idx', type: 'minmax', granularity: 3
+ end
+
+ remove_index :some, 'idx'
+
+ add_index :some, 'int1 * int2', name: 'idx2', type: 'set(10)', granularity: 4
+ end
+end
+```
+
+Create table with custom column structure and codec compression:
+
+```ruby
+class CreateDataItems < ActiveRecord::Migration[7.1]
+ def change
+ create_table "data_items", id: false, options: "MergeTree PARTITION BY toYYYYMM(timestamp) ORDER BY timestamp", force: :cascade do |t|
+ t.integer :user_id, limit: 8, codec: 'DoubleDelta, LZ4'
+ t.column "timestamp", "DateTime('UTC') CODEC(DoubleDelta, LZ4)"
+ end
+ end
+end
+```
+
+Create Buffer table with connection database name:
+
+```ruby
+class CreateDataItems < ActiveRecord::Migration[7.1]
+ def change
+ create_table :some_buffers, as: :some, options: "Buffer(#{connection.database}, some, 1, 10, 60, 100, 10000, 10000000, 100000000)"
end
end
-
```
@@ -236,6 +281,7 @@ Donations to this project are going directly to [PNixx](https://github.com/PNixx
* BTC address: `1H3rhpf7WEF5JmMZ3PVFMQc7Hm29THgUfN`
* ETH address: `0x6F094365A70fe7836A633d2eE80A1FA9758234d5`
* XMR address: `42gP71qLB5M43RuDnrQ3vSJFFxis9Kw9VMURhpx9NLQRRwNvaZRjm2TFojAMC8Fk1BQhZNKyWhoyJSn5Ak9kppgZPjE17Zh`
+* TON address: `UQBt0-s1igIpJoEup0B1yAUkZ56rzbpruuAjNhQ26MVCaNlC`
## Development
@@ -243,6 +289,12 @@ After checking out the repo, run `bin/setup` to install dependencies. You can al
To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
+Testing GitHub Actions locally:
+
+```bash
+act
+```
+
## Contributing
Bug reports and pull requests are welcome on GitHub at [https://github.com/pnixx/clickhouse-activerecord](https://github.com/pnixx/clickhouse-activerecord). This project is intended to be a safe, welcoming space for collaboration, and contributors are expected to adhere to the [Contributor Covenant](http://contributor-covenant.org) code of conduct.
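
The README's type table now lists `Map`, and the schema-creation changes further down in this diff add a `:map` column option (`map: true` wraps the base type as `Map(String, T)`, `map: :array` as `Map(String, Array(T))`). A hedged migration sketch built on those options (table and column names are illustrative only):

```ruby
class CreateEvents < ActiveRecord::Migration[7.1]
  def change
    create_table :events, id: false, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (date)' do |t|
      t.date :date, null: false
      # map: true produces Map(String, String) for a string column
      t.string :labels, map: true
      # map: :array produces Map(String, Array(UInt32)) for an integer column
      t.integer :counters, map: :array
    end
  end
end
```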
diff --git a/clickhouse-activerecord.gemspec b/clickhouse-activerecord.gemspec
index 4de37018..dea07f4a 100644
--- a/clickhouse-activerecord.gemspec
+++ b/clickhouse-activerecord.gemspec
@@ -24,9 +24,8 @@ Gem::Specification.new do |spec|
spec.require_paths = ['lib']
spec.add_runtime_dependency 'bundler', '>= 1.13.4'
- spec.add_runtime_dependency 'activerecord', '>= 5.2'
+ spec.add_runtime_dependency 'activerecord', '~> 7.1'
- spec.add_development_dependency 'bundler', '>= 1.15'
spec.add_development_dependency 'rake', '~> 13.0'
spec.add_development_dependency 'rspec', '~> 3.4'
spec.add_development_dependency 'pry', '~> 0.12'
diff --git a/lib/active_record/connection_adapters/clickhouse/column.rb b/lib/active_record/connection_adapters/clickhouse/column.rb
new file mode 100644
index 00000000..6a0e23a8
--- /dev/null
+++ b/lib/active_record/connection_adapters/clickhouse/column.rb
@@ -0,0 +1,21 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module Clickhouse
+ class Column < ActiveRecord::ConnectionAdapters::Column
+
+ attr_reader :codec
+
+ def initialize(name, default, sql_type_metadata = nil, null = true, default_function = nil, codec: nil, **args)
+ super
+ @codec = codec
+ end
+
+ private
+
+ def deduplicated
+ self
+ end
+ end
+ end
+ end
+end
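
This `Clickhouse::Column` subclass carries the compression codec that `new_column_from_field` (in schema_statements below) extracts from the column metadata and passes as `codec:`. A small hedged sketch of reading it back, assuming a `data_items` table created with a `CODEC` as in the README example:

```ruby
# Look up the column object for a codec-compressed column.
column = ActiveRecord::Base.connection.columns('data_items').find { |c| c.name == 'user_id' }
column.codec # => the CODEC expression reported by ClickHouse, or nil when the column has none
```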
diff --git a/lib/active_record/connection_adapters/clickhouse/oid/date_time.rb b/lib/active_record/connection_adapters/clickhouse/oid/date_time.rb
index 909b83d4..017c40fe 100644
--- a/lib/active_record/connection_adapters/clickhouse/oid/date_time.rb
+++ b/lib/active_record/connection_adapters/clickhouse/oid/date_time.rb
@@ -9,9 +9,8 @@ class DateTime < Type::DateTime # :nodoc:
def serialize(value)
value = super
return unless value
- return value.strftime('%Y-%m-%d %H:%M:%S') unless value.acts_like?(:time)
- value.to_time.strftime('%Y-%m-%d %H:%M:%S')
+ value.strftime('%Y-%m-%d %H:%M:%S' + (@precision.present? && @precision > 0 ? ".%#{@precision}N" : ''))
end
def type_cast_from_database(value)
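
With this change the serializer keeps sub-second digits whenever the column has a precision, by appending `.%<precision>N` to the format string. A quick illustration of the type in isolation (a sketch, not part of the diff):

```ruby
type = ActiveRecord::ConnectionAdapters::Clickhouse::OID::DateTime.new(precision: 3)
type.serialize(Time.utc(2024, 1, 2, 3, 4, 5.678r))
# => "2024-01-02 03:04:05.678"

# Without a precision the old second-resolution format is kept:
ActiveRecord::ConnectionAdapters::Clickhouse::OID::DateTime.new.serialize(Time.utc(2024, 1, 2, 3, 4, 5))
# => "2024-01-02 03:04:05"
```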
diff --git a/lib/active_record/connection_adapters/clickhouse/oid/map.rb b/lib/active_record/connection_adapters/clickhouse/oid/map.rb
new file mode 100644
index 00000000..89e420c8
--- /dev/null
+++ b/lib/active_record/connection_adapters/clickhouse/oid/map.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module Clickhouse
+ module OID # :nodoc:
+ class Map < Type::Value # :nodoc:
+
+ def initialize(sql_type)
+ case sql_type
+ when /U?Int(\d+)/
+ @subtype = :integer
+ @limit = bits_to_limit(Regexp.last_match(1)&.to_i)
+ when /DateTime/
+ @subtype = :datetime
+ when /Date/
+ @subtype = :date
+ else
+ @subtype = :string
+ end
+ end
+
+ def type
+ @subtype
+ end
+
+ def deserialize(value)
+ if value.is_a?(::Hash)
+ value.map { |k, item| [k.to_s, deserialize(item)] }.to_h
+ elsif value.is_a?(::Array)
+ value.map { |item| deserialize(item) }
+ else
+ return value if value.nil?
+ case @subtype
+ when :integer
+ value.to_i
+ when :datetime
+ ::DateTime.parse(value)
+ when :date
+ ::Date.parse(value)
+ else
+ super
+ end
+ end
+ end
+
+ def serialize(value)
+ if value.is_a?(::Hash)
+ value.map { |k, item| [k.to_s, serialize(item)] }.to_h
+ elsif value.is_a?(::Array)
+ value.map { |item| serialize(item) }
+ else
+ return value if value.nil?
+ case @subtype
+ when :integer
+ value.to_i
+ when :datetime
+ DateTime.new.serialize(value)
+ when :date
+ Date.new.serialize(value)
+ when :string
+ value.to_s
+ else
+ super
+ end
+ end
+ end
+
+ private
+
+ def bits_to_limit(bits)
+ case bits
+ when 8 then 1
+ when 16 then 2
+ when 32 then 4
+ when 64 then 8
+ when 128 then 16
+ when 256 then 32
+ end
+ end
+
+ end
+ end
+ end
+ end
+end
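
The Map type keys everything by `String` and casts values through the subtype parsed out of the SQL type (`Int*`/`UInt*`, `DateTime`, `Date`, otherwise string). A hedged illustration of the class on its own:

```ruby
map = ActiveRecord::ConnectionAdapters::Clickhouse::OID::Map.new('Map(String, UInt64)')

# Keys are stringified and values are cast by the parsed subtype (:integer here).
map.deserialize('clicks' => '5', 'views' => '12') # => { "clicks" => 5, "views" => 12 }
map.serialize(clicks: 5)                          # => { "clicks" => 5 }
```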
diff --git a/lib/active_record/connection_adapters/clickhouse/oid/uuid.rb b/lib/active_record/connection_adapters/clickhouse/oid/uuid.rb
new file mode 100644
index 00000000..7e99c08c
--- /dev/null
+++ b/lib/active_record/connection_adapters/clickhouse/oid/uuid.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module ActiveRecord
+ module ConnectionAdapters
+ module Clickhouse
+ module OID # :nodoc:
+ class Uuid < Type::Value # :nodoc:
+ ACCEPTABLE_UUID = %r{\A(\{)?([a-fA-F0-9]{4}-?){8}(?(1)\}|)\z}
+ CANONICAL_UUID = %r{\A[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}\z}
+
+ alias :serialize :deserialize
+
+ def type
+ :uuid
+ end
+
+ def changed?(old_value, new_value, _new_value_before_type_cast)
+ old_value.class != new_value.class ||
+ new_value != old_value
+ end
+
+ def changed_in_place?(raw_old_value, new_value)
+ raw_old_value.class != new_value.class ||
+ new_value != raw_old_value
+ end
+
+ private
+
+ def cast_value(value)
+ value = value.to_s
+ format_uuid(value) if value.match?(ACCEPTABLE_UUID)
+ end
+
+ def format_uuid(uuid)
+ if uuid.match?(CANONICAL_UUID)
+ uuid
+ else
+ uuid = uuid.delete("{}-").downcase
+ "#{uuid[..7]}-#{uuid[8..11]}-#{uuid[12..15]}-#{uuid[16..19]}-#{uuid[20..]}"
+ end
+ end
+ end
+ end
+ end
+ end
+end
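
`cast_value` accepts any shape matched by `ACCEPTABLE_UUID` (braced, dashless, mixed case) and normalizes non-canonical input, while anything else casts to `nil`. For example (a sketch using the class directly):

```ruby
uuid = ActiveRecord::ConnectionAdapters::Clickhouse::OID::Uuid.new

uuid.cast('{A0EEBC99-9C0B-4EF8-BB6D-6BB9BD380A11}') # => "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11"
uuid.cast('a0eebc999c0b4ef8bb6d6bb9bd380a11')       # => "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11"
uuid.cast('not-a-uuid')                             # => nil
```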
diff --git a/lib/active_record/connection_adapters/clickhouse/quoting.rb b/lib/active_record/connection_adapters/clickhouse/quoting.rb
new file mode 100644
index 00000000..47c90dfb
--- /dev/null
+++ b/lib/active_record/connection_adapters/clickhouse/quoting.rb
@@ -0,0 +1,19 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module Clickhouse
+ module Quoting
+ extend ActiveSupport::Concern
+
+ module ClassMethods # :nodoc:
+ def quote_column_name(name)
+ name.to_s.include?('.') ? "`#{name}`" : name.to_s
+ end
+
+ def quote_table_name(name)
+ name.to_s
+ end
+ end
+ end
+ end
+ end
+end
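
These quoting rules are ClickHouse-specific: only dotted references are wrapped in backticks and table names pass through untouched. Because the concern defines them under `ClassMethods`, they end up as class-level methods on the adapter (sketch):

```ruby
ActiveRecord::ConnectionAdapters::ClickhouseAdapter.quote_column_name('group_id')       # => "group_id"
ActiveRecord::ConnectionAdapters::ClickhouseAdapter.quote_column_name('users.group_id') # => "`users.group_id`"
ActiveRecord::ConnectionAdapters::ClickhouseAdapter.quote_table_name('events')          # => "events"
```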
diff --git a/lib/active_record/connection_adapters/clickhouse/schema_creation.rb b/lib/active_record/connection_adapters/clickhouse/schema_creation.rb
index 730a1c56..4af0d4e7 100644
--- a/lib/active_record/connection_adapters/clickhouse/schema_creation.rb
+++ b/lib/active_record/connection_adapters/clickhouse/schema_creation.rb
@@ -1,4 +1,3 @@
-# frozen_string_literal: true
begin
require "active_record/connection_adapters/deduplicable"
rescue LoadError => e
@@ -34,6 +33,15 @@ def add_column_options!(sql, options)
if options[:array]
sql.gsub!(/\s+(.*)/, ' Array(\1)')
end
+ if options[:map] == :array
+ sql.gsub!(/\s+(.*)/, ' Map(String, Array(\1))')
+ end
+ if options[:map] == true
+ sql.gsub!(/\s+(.*)/, ' Map(String, \1)')
+ end
+ if options[:codec]
+ sql.gsub!(/\s+(.*)/, " \\1 CODEC(#{options[:codec]})")
+ end
sql.gsub!(/(\sString)\(\d+\)/, '\1')
sql << " DEFAULT #{quote_default_expression(options[:default], options[:column])}" if options_include_default?(options)
sql
@@ -89,14 +97,23 @@ def visit_TableDefinition(o)
create_sql = +"CREATE#{table_modifier_in_create(o)} #{o.view ? "VIEW" : "TABLE"} "
create_sql << "IF NOT EXISTS " if o.if_not_exists
create_sql << "#{quote_table_name(o.name)} "
+ add_as_clause!(create_sql, o) if o.as && !o.view
add_to_clause!(create_sql, o) if o.materialized
statements = o.columns.map { |c| accept c }
statements << accept(o.primary_keys) if o.primary_keys
+
+ if supports_indexes_in_create?
+ indexes = o.indexes.map do |expression, options|
+ accept(@conn.add_index_options(o.name, expression, **options))
+ end
+ statements.concat(indexes)
+ end
+
create_sql << "(#{statements.join(', ')})" if statements.present?
# Attach options for only table or materialized view without TO section
add_table_options!(create_sql, o) if !o.view || o.view && o.materialized && !o.to
- add_as_clause!(create_sql, o)
+ add_as_clause!(create_sql, o) if o.as && o.view
create_sql
end
@@ -124,12 +141,25 @@ def visit_ChangeColumnDefinition(o)
change_column_sql
end
+ def visit_IndexDefinition(o, create = false)
+ sql = create ? ["ALTER TABLE #{quote_table_name(o.table)} ADD"] : []
+ sql << "INDEX"
+ sql << "IF NOT EXISTS" if o.if_not_exists
+ sql << "IF EXISTS" if o.if_exists
+ sql << "#{quote_column_name(o.name)} (#{o.expression}) TYPE #{o.type}"
+ sql << "GRANULARITY #{o.granularity}" if o.granularity
+ sql << "FIRST #{quote_column_name(o.first)}" if o.first
+ sql << "AFTER #{quote_column_name(o.after)}" if o.after
+
+ sql.join(' ')
+ end
+
+ def visit_CreateIndexDefinition(o)
+ visit_IndexDefinition(o.index, true)
+ end
+
def current_database
- if ActiveRecord::version >= Gem::Version.new('6')
- ActiveRecord::Base.connection_db_config.database
- else
- ActiveRecord::Base.connection_config[:database]
- end
+ ActiveRecord::Base.connection_db_config.database
end
end
end
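
`visit_IndexDefinition` assembles the skipping-index clause and `visit_CreateIndexDefinition` prefixes it with `ALTER TABLE ... ADD`, so the migration helpers shown in the README should compile roughly to the DDL in the comments below (a sketch of expected output, not taken from the diff):

```ruby
add_index :some, 'int1 * int2', name: 'idx2', type: 'set(10)', granularity: 4
# ALTER TABLE some ADD INDEX idx2 (int1 * int2) TYPE set(10) GRANULARITY 4

remove_index :some, 'idx'
# ALTER TABLE some DROP INDEX idx
```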
diff --git a/lib/active_record/connection_adapters/clickhouse/schema_statements.rb b/lib/active_record/connection_adapters/clickhouse/schema_statements.rb
index 69cc42c7..9dfe9733 100644
--- a/lib/active_record/connection_adapters/clickhouse/schema_statements.rb
+++ b/lib/active_record/connection_adapters/clickhouse/schema_statements.rb
@@ -6,19 +6,30 @@ module ActiveRecord
module ConnectionAdapters
module Clickhouse
module SchemaStatements
+ DEFAULT_RESPONSE_FORMAT = 'JSONCompactEachRowWithNamesAndTypes'.freeze
+
+ DB_EXCEPTION_REGEXP = /\ACode:\s+\d+\.\s+DB::Exception:/.freeze
+
def execute(sql, name = nil, settings: {})
do_execute(sql, name, settings: settings)
end
- def exec_insert(sql, name, _binds, _pk = nil, _sequence_name = nil)
+ def exec_insert(sql, name, _binds, _pk = nil, _sequence_name = nil, returning: nil)
new_sql = sql.dup.sub(/ (DEFAULT )?VALUES/, " VALUES")
do_execute(new_sql, name, format: nil)
true
end
- def exec_query(sql, name = nil, binds = [], prepare: false)
+ def internal_exec_query(sql, name = nil, binds = [], prepare: false, async: false, allow_retry: false)
result = do_execute(sql, name)
- ActiveRecord::Result.new(result['meta'].map { |m| m['name'] }, result['data'])
+ columns = result['meta'].map { |m| m['name'] }
+ types = {}
+ result['meta'].each_with_index do |m, i|
+ # Types must be keyed by both column name and index since this Rails 7.2 commit:
+ # https://github.com/rails/rails/commit/24dbf7637b1d5cd6eb3d7100b8d0f6872c3fee3c
+ types[m['name']] = types[i] = type_map.lookup(m['type'])
+ end
+ ActiveRecord::Result.new(columns, result['data'], types)
rescue ActiveRecord::ActiveRecordError => e
raise e
rescue StandardError => e
@@ -30,12 +41,23 @@ def exec_insert_all(sql, name)
true
end
+ # @link https://clickhouse.com/docs/en/sql-reference/statements/alter/update
def exec_update(_sql, _name = nil, _binds = [])
- raise ActiveRecord::ActiveRecordError, 'Clickhouse update is not supported'
+ do_execute(_sql, _name, format: nil)
+ 0
end
+ # @link https://clickhouse.com/docs/en/sql-reference/statements/delete
def exec_delete(_sql, _name = nil, _binds = [])
- raise ActiveRecord::ActiveRecordError, 'Clickhouse delete is not supported'
+ log(_sql, "#{adapter_name} #{_name}") do
+ res = request(_sql)
+ begin
+ data = JSON.parse(res.header['x-clickhouse-summary'])
+ data['result_rows'].to_i
+ rescue JSON::ParserError
+ 0
+ end
+ end
end
def tables(name = nil)
@@ -44,6 +66,28 @@ def tables(name = nil)
result['data'].flatten
end
+ def views(name = nil)
+ result = do_system_execute("SHOW TABLES WHERE engine = 'View'", name)
+ return [] if result.nil?
+ result['data'].flatten
+ end
+
+ def materialized_views(name = nil)
+ result = do_system_execute("SHOW TABLES WHERE engine = 'MaterializedView'", name)
+ return [] if result.nil?
+ result['data'].flatten
+ end
+
+ def functions
+ result = do_system_execute("SELECT name FROM system.functions WHERE origin = 'SQLUserDefined' ORDER BY name")
+ return [] if result.nil?
+ result['data'].flatten
+ end
+
+ def show_create_function(function)
+ do_execute("SELECT create_query FROM system.functions WHERE origin = 'SQLUserDefined' AND name = '#{function}'", format: nil)
+ end
+
def table_options(table)
sql = show_create_table(table)
{ options: sql.gsub(/^(?:.*?)(?:ENGINE = (.*?))?( AS SELECT .*?)?$/, '\\1').presence, as: sql.match(/^CREATE (?:.*?) AS (SELECT .*?)$/).try(:[], 1) }.compact
@@ -54,25 +98,43 @@ def indexes(table_name, name = nil)
[]
end
+ def add_index_options(table_name, expression, **options)
+ options.assert_valid_keys(:name, :type, :granularity, :first, :after, :if_not_exists, :if_exists)
+
+ validate_index_length!(table_name, options[:name])
+
+ IndexDefinition.new(table_name, options[:name], expression, options[:type], options[:granularity], first: options[:first], after: options[:after], if_not_exists: options[:if_not_exists], if_exists: options[:if_exists])
+ end
+
def data_sources
tables
end
def do_system_execute(sql, name = nil)
log_with_debug(sql, "#{adapter_name} #{name}") do
- res = @connection.post("/?#{@config.to_param}", "#{sql} FORMAT JSONCompact", 'User-Agent' => "Clickhouse ActiveRecord #{ClickhouseActiverecord::VERSION}")
-
- process_response(res)
+ res = request(sql, DEFAULT_RESPONSE_FORMAT)
+ process_response(res, DEFAULT_RESPONSE_FORMAT, sql)
end
end
- def do_execute(sql, name = nil, format: 'JSONCompact', settings: {})
+ def do_execute(sql, name = nil, format: DEFAULT_RESPONSE_FORMAT, settings: {})
log(sql, "#{adapter_name} #{name}") do
- formatted_sql = apply_format(sql, format)
- request_params = @config || {}
- res = @connection.post("/?#{request_params.merge(settings).to_param}", formatted_sql, 'User-Agent' => "Clickhouse ActiveRecord #{ClickhouseActiverecord::VERSION}")
+ res = request(sql, format, settings)
+ process_response(res, format, sql)
+ end
+ end
+
+ if ::ActiveRecord::version >= Gem::Version.new('7.2')
+ def schema_migration
+ pool.schema_migration
+ end
- process_response(res)
+ def migration_context
+ pool.migration_context
+ end
+
+ def internal_metadata
+ pool.internal_metadata
end
end
@@ -92,23 +154,59 @@ def assume_migrated_upto_version(version, migrations_paths = nil)
if (duplicate = inserting.detect { |v| inserting.count(v) > 1 })
raise "Duplicate migration #{duplicate}. Please renumber your migrations to resolve the conflict."
end
- do_execute(insert_versions_sql(inserting), nil, settings: {max_partitions_per_insert_block: [100, inserting.size].max})
+ do_execute(insert_versions_sql(inserting), nil, format: nil, settings: {max_partitions_per_insert_block: [100, inserting.size].max})
+ end
+ end
+
+ # Fix insert_all method
+ # https://github.com/PNixx/clickhouse-activerecord/issues/71#issuecomment-1923244983
+ def with_yaml_fallback(value) # :nodoc:
+ if value.is_a?(Array) || value.is_a?(Hash)
+ value
+ else
+ super
end
end
private
+ # Make HTTP request to ClickHouse server
+ # @param [String] sql
+ # @param [String, nil] format
+ # @param [Hash] settings
+ # @return [Net::HTTPResponse]
+ def request(sql, format = nil, settings = {})
+ formatted_sql = apply_format(sql, format)
+ request_params = @connection_config || {}
+ @connection.post("/?#{request_params.merge(settings).to_param}", formatted_sql, {
+ 'User-Agent' => "Clickhouse ActiveRecord #{ClickhouseActiverecord::VERSION}",
+ 'Content-Type' => 'application/x-www-form-urlencoded',
+ })
+ end
+
def apply_format(sql, format)
format ? "#{sql} FORMAT #{format}" : sql
end
- def process_response(res)
+ def process_response(res, format, sql = nil)
case res.code.to_i
when 200
- res.body.presence && JSON.parse(res.body)
+ body = res.body
+
+ if body.include?("DB::Exception") && body.match?(DB_EXCEPTION_REGEXP)
+ raise ActiveRecord::ActiveRecordError, "Response code: #{res.code}:\n#{res.body}#{sql ? "\nQuery: #{sql}" : ''}"
+ else
+ format_body_response(res.body, format)
+ end
else
- raise ActiveRecord::ActiveRecordError,
- "Response code: #{res.code}:\n#{res.body}"
+ case res.body
+ when /DB::Exception:.*\(UNKNOWN_DATABASE\)/
+ raise ActiveRecord::NoDatabaseError
+ when /DB::Exception:.*\(DATABASE_ALREADY_EXISTS\)/
+ raise ActiveRecord::DatabaseAlreadyExists
+ else
+ raise ActiveRecord::ActiveRecordError, "Response code: #{res.code}:\n#{res.body}"
+ end
end
rescue JSON::ParserError
res.body
@@ -127,17 +225,13 @@ def create_table_definition(table_name, **options)
Clickhouse::TableDefinition.new(self, table_name, **options)
end
- def new_column_from_field(table_name, field)
+ def new_column_from_field(table_name, field, _definitions)
sql_type = field[1]
type_metadata = fetch_type_metadata(sql_type)
- default = field[3]
- default_value = extract_value_from_default(default)
- default_function = extract_default_function(default_value, default)
- if ActiveRecord::version >= Gem::Version.new('6')
- ClickhouseColumn.new(field[0], default_value, type_metadata, field[1].include?('Nullable'), default_function)
- else
- ClickhouseColumn.new(field[0], default_value, type_metadata, field[1].include?('Nullable'), table_name, default_function)
- end
+ default_value = extract_value_from_default(field[3], field[2])
+ default_function = extract_default_function(field[3])
+ default_value = lookup_cast_type(sql_type).cast(default_value)
+ Clickhouse::Column.new(field[0], default_value, type_metadata, field[1].include?('Nullable'), default_function, codec: field[5].presence)
end
protected
@@ -148,40 +242,67 @@ def table_structure(table_name)
return data unless data.empty?
- raise ActiveRecord::StatementInvalid,
- "Could not find table '#{table_name}'"
+ raise ActiveRecord::StatementInvalid, "Could not find table '#{table_name}'"
end
alias column_definitions table_structure
private
# Extracts the value from a PostgreSQL column default definition.
- def extract_value_from_default(default)
- case default
- # Quoted types
- when /\Anow\(\)\z/m
- nil
- # Boolean types
- when "true".freeze, "false".freeze
- default
- # Object identifier types
- when "''"
- ''
- when /\A-?\d+\z/
- $1
+ def extract_value_from_default(default_expression, default_type)
+ return nil if default_type != 'DEFAULT' || default_expression.blank?
+ return nil if has_default_function?(default_expression)
+
+ # Convert string
+ return $1 if default_expression.match(/^'(.*?)'$/)
+
+ default_expression
+ end
+
+ def extract_default_function(default) # :nodoc:
+ default if has_default_function?(default)
+ end
+
+ def has_default_function?(default) # :nodoc:
+ (%r{\w+\(.*\)} === default)
+ end
+
+ def format_body_response(body, format)
+ return body if body.blank?
+
+ case format
+ when 'JSONCompact'
+ format_from_json_compact(body)
+ when 'JSONCompactEachRowWithNamesAndTypes'
+ format_from_json_compact_each_row_with_names_and_types(body)
else
- # Anything else is blank, some user type, or some function
- # and we can't know the value of that, so return nil.
- nil
+ body
end
end
- def extract_default_function(default_value, default) # :nodoc:
- default if has_default_function?(default_value, default)
+ def format_from_json_compact(body)
+ parse_json_payload(body)
+ end
+
+ def format_from_json_compact_each_row_with_names_and_types(body)
+ rows = body.split("\n").map { |row| parse_json_payload(row) }
+ names, types, *data = rows
+
+ meta = names.zip(types).map do |name, type|
+ {
+ 'name' => name,
+ 'type' => type
+ }
+ end
+
+ {
+ 'meta' => meta,
+ 'data' => data
+ }
end
- def has_default_function?(default_value, default) # :nodoc:
- !default_value && (%r{\w+\(.*\)} === default)
+ def parse_json_payload(payload)
+ JSON.parse(payload, decimal_class: BigDecimal)
end
end
end
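
Responses now default to the `JSONCompactEachRowWithNamesAndTypes` format: the first row carries the column names, the second the types, and every following row is data, which `format_from_json_compact_each_row_with_names_and_types` folds back into the familiar `meta`/`data` hash. A toy payload to show the shape (assumed example data):

```ruby
body = <<~ROWS
  ["id", "name"]
  ["UInt32", "String"]
  [1, "first"]
  [2, "second"]
ROWS

# format_body_response(body, 'JSONCompactEachRowWithNamesAndTypes') then yields:
# {
#   "meta" => [{ "name" => "id", "type" => "UInt32" }, { "name" => "name", "type" => "String" }],
#   "data" => [[1, "first"], [2, "second"]]
# }
```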
diff --git a/lib/active_record/connection_adapters/clickhouse/schema_definitions.rb b/lib/active_record/connection_adapters/clickhouse/table_definition.rb
similarity index 75%
rename from lib/active_record/connection_adapters/clickhouse/schema_definitions.rb
rename to lib/active_record/connection_adapters/clickhouse/table_definition.rb
index 54b89593..e65f8fd5 100644
--- a/lib/active_record/connection_adapters/clickhouse/schema_definitions.rb
+++ b/lib/active_record/connection_adapters/clickhouse/table_definition.rb
@@ -68,10 +68,9 @@ def datetime(*args, **options)
if options[:precision]
kind = :datetime64
- options[:value] = options[:precision]
end
- args.each { |name| column(name, kind, **options.except(:precision)) }
+ args.each { |name| column(name, kind, **options) }
end
def uuid(*args, **options)
@@ -94,6 +93,34 @@ def enum(*args, **options)
args.each { |name| column(name, kind, **options.except(:limit)) }
end
+
+ def column(name, type, index: nil, **options)
+ options[:null] = false if type.match?(/Nullable\([^)]+\)/)
+ super(name, type, index: index, **options)
+ end
+
+ private
+
+ def valid_column_definition_options
+ super + [:array, :low_cardinality, :fixed_string, :value, :type, :map, :codec, :unsigned]
+ end
+ end
+
+ class IndexDefinition
+ attr_reader :table, :name, :expression, :type, :granularity, :first, :after, :if_exists, :if_not_exists
+
+ def initialize(table, name, expression, type, granularity, first:, after:, if_exists:, if_not_exists:)
+ @table = table
+ @name = name
+ @expression = expression
+ @type = type
+ @granularity = granularity
+ @first = first
+ @after = after
+ @if_exists = if_exists
+ @if_not_exists = if_not_exists
+ end
+
end
end
end
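
The widened `valid_column_definition_options` list is what lets ClickHouse-specific options travel through the standard column helpers. A hedged migration sketch combining a few of them (the exact DDL depends on schema_creation code only partially shown in this diff):

```ruby
class CreateVisits < ActiveRecord::Migration[7.1]
  def change
    create_table :visits, id: false, options: 'MergeTree ORDER BY (started_at)' do |t|
      t.datetime :started_at, precision: 3, null: false  # routed to the :datetime64 native type
      t.string   :country, low_cardinality: true         # assumed to produce LowCardinality(String)
      t.integer  :duration, limit: 8, unsigned: false, codec: 'T64, LZ4'
    end
  end
end
```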
diff --git a/lib/active_record/connection_adapters/clickhouse_adapter.rb b/lib/active_record/connection_adapters/clickhouse_adapter.rb
index 696b65ac..7e6d40c3 100644
--- a/lib/active_record/connection_adapters/clickhouse_adapter.rb
+++ b/lib/active_record/connection_adapters/clickhouse_adapter.rb
@@ -1,15 +1,22 @@
# frozen_string_literal: true
-require 'clickhouse-activerecord/arel/visitors/to_sql'
-require 'clickhouse-activerecord/arel/table'
-require 'clickhouse-activerecord/migration'
+require 'arel/visitors/clickhouse'
+require 'arel/nodes/final'
+require 'arel/nodes/grouping_sets'
+require 'arel/nodes/settings'
+require 'arel/nodes/using'
+require 'arel/nodes/limit_by'
require 'active_record/connection_adapters/clickhouse/oid/array'
require 'active_record/connection_adapters/clickhouse/oid/date'
require 'active_record/connection_adapters/clickhouse/oid/date_time'
require 'active_record/connection_adapters/clickhouse/oid/big_integer'
-require 'active_record/connection_adapters/clickhouse/schema_definitions'
+require 'active_record/connection_adapters/clickhouse/oid/map'
+require 'active_record/connection_adapters/clickhouse/oid/uuid'
+require 'active_record/connection_adapters/clickhouse/column'
+require 'active_record/connection_adapters/clickhouse/quoting'
require 'active_record/connection_adapters/clickhouse/schema_creation'
require 'active_record/connection_adapters/clickhouse/schema_statements'
+require 'active_record/connection_adapters/clickhouse/table_definition'
require 'net/http'
require 'openssl'
@@ -20,30 +27,11 @@ class << self
def clickhouse_connection(config)
config = config.symbolize_keys
- if config[:connection]
- connection = {
- connection: config[:connection]
- }
- else
- port = config[:port] || 8123
- connection = {
- host: config[:host] || 'localhost',
- port: port,
- ssl: config[:ssl].present? ? config[:ssl] : port == 443,
- sslca: config[:sslca],
- read_timeout: config[:read_timeout],
- write_timeout: config[:write_timeout],
- keep_alive_timeout: config[:keep_alive_timeout]
- }
- end
-
- if config.key?(:database)
- database = config[:database]
- else
+ unless config.key?(:database)
raise ArgumentError, 'No database specified. Missing argument: database.'
end
- ConnectionAdapters::ClickhouseAdapter.new(logger, connection, { user: config[:username], password: config[:password], database: database }.compact, config)
+ ConnectionAdapters::ClickhouseAdapter.new(config)
end
end
end
@@ -62,6 +50,13 @@ def is_view
module ModelSchema
module ClassMethods
+ delegate :final, :final!,
+ :group_by_grouping_sets, :group_by_grouping_sets!,
+ :settings, :settings!,
+ :window, :window!,
+ :limit_by, :limit_by!,
+ to: :all
+
def is_view
@is_view || false
end
@@ -70,18 +65,22 @@ def is_view=(value)
@is_view = value
end
- def arel_table # :nodoc:
- @arel_table ||= ClickhouseActiverecord::Arel::Table.new(table_name, type_caster: type_caster)
+ def _delete_record(constraints)
+ raise ActiveRecord::ActiveRecordError.new('Deleting a row is not possible without a primary key') unless self.primary_key
+ super
end
end
end
module ConnectionAdapters
- class ClickhouseColumn < Column
+ if ActiveRecord::version >= Gem::Version.new('7.2')
+ register "clickhouse", "ActiveRecord::ConnectionAdapters::ClickhouseAdapter", "active_record/connection_adapters/clickhouse_adapter"
end
class ClickhouseAdapter < AbstractAdapter
+ include Clickhouse::Quoting
+
ADAPTER_NAME = 'Clickhouse'.freeze
NATIVE_DATABASE_TYPES = {
string: { name: 'String' },
@@ -92,7 +91,7 @@ class ClickhouseAdapter < AbstractAdapter
datetime: { name: 'DateTime' },
datetime64: { name: 'DateTime64' },
date: { name: 'Date' },
- boolean: { name: 'UInt8' },
+ boolean: { name: 'Bool' },
uuid: { name: 'UUID' },
enum8: { name: 'Enum8' },
@@ -116,36 +115,55 @@ class ClickhouseAdapter < AbstractAdapter
include Clickhouse::SchemaStatements
# Initializes and connects a Clickhouse adapter.
- def initialize(logger, connection_parameters, config, full_config)
- super(nil, logger)
- @connection_parameters = connection_parameters
- @config = config
- @debug = full_config[:debug] || false
- @full_config = full_config
+ def initialize(config_or_deprecated_connection, deprecated_logger = nil, deprecated_connection_options = nil, deprecated_config = nil)
+ super
+ if @config[:connection]
+ connection = {
+ connection: @config[:connection]
+ }
+ else
+ port = @config[:port] || 8123
+ connection = {
+ host: @config[:host] || 'localhost',
+ port: port,
+ ssl: @config[:ssl].present? ? @config[:ssl] : port == 443,
+ sslca: @config[:sslca],
+ read_timeout: @config[:read_timeout],
+ write_timeout: @config[:write_timeout],
+ keep_alive_timeout: @config[:keep_alive_timeout]
+ }
+ end
+ @connection_parameters = connection
+
+ @connection_config = { user: @config[:username], password: @config[:password], database: @config[:database] }.compact
+ @debug = @config[:debug] || false
@prepared_statements = false
- if ActiveRecord::version == Gem::Version.new('6.0.0')
- @prepared_statement_status = Concurrent::ThreadLocalVar.new(false)
- end
connect
end
- # Support SchemaMigration from v5.2.2 to v6+
- def schema_migration # :nodoc:
- ClickhouseActiverecord::SchemaMigration
+ # Return ClickHouse server version
+ def server_version
+ @server_version ||= do_system_execute('SELECT version()')['data'][0][0]
end
- def migrations_paths
- @full_config[:migrations_paths] || 'db/migrate_clickhouse'
+ # Savepoints are not supported, noop
+ def create_savepoint(name)
end
- def migration_context # :nodoc:
- ClickhouseActiverecord::MigrationContext.new(migrations_paths, schema_migration)
+ def exec_rollback_to_savepoint(name)
+ end
+
+ def release_savepoint(name)
+ end
+
+ def migrations_paths
+ @config[:migrations_paths] || 'db/migrate_clickhouse'
end
def arel_visitor # :nodoc:
- ClickhouseActiverecord::Arel::Visitors::ToSql.new(self)
+ Arel::Visitors::Clickhouse.new(self)
end
def native_database_types #:nodoc:
@@ -156,66 +174,88 @@ def valid_type?(type)
!native_database_types[type].nil?
end
- def extract_limit(sql_type) # :nodoc:
- case sql_type
- when /(Nullable)?\(?String\)?/
- super('String')
- when /(Nullable)?\(?U?Int8\)?/
- 1
- when /(Nullable)?\(?U?Int16\)?/
- 2
- when /(Nullable)?\(?U?Int32\)?/
- nil
- when /(Nullable)?\(?U?Int64\)?/
- 8
- else
- super
- end
+ def supports_indexes_in_create?
+ true
end
- # `extract_scale` and `extract_precision` are the same as in the Rails abstract base class,
- # except this permits a space after the comma
+ class << self
+ def extract_limit(sql_type) # :nodoc:
+ case sql_type
+ when /(Nullable)?\(?String\)?/
+ super('String')
+ when /(Nullable)?\(?U?Int8\)?/
+ 1
+ when /(Nullable)?\(?U?Int16\)?/
+ 2
+ when /(Nullable)?\(?U?Int32\)?/
+ nil
+ when /(Nullable)?\(?U?Int64\)?/
+ 8
+ when /(Nullable)?\(?U?Int128\)?/
+ 16
+ else
+ super
+ end
+ end
+
+ # `extract_scale` and `extract_precision` are the same as in the Rails abstract base class,
+ # except this permits a space after the comma
- def extract_scale(sql_type)
- case sql_type
- when /\((\d+)\)/ then 0
- when /\((\d+)(,\s?(\d+))\)/ then $3.to_i
+ def extract_scale(sql_type)
+ case sql_type
+ when /\((\d+)\)/ then 0
+ when /\((\d+)(,\s?(\d+))\)/ then $3.to_i
+ end
end
- end
- def extract_precision(sql_type)
- $1.to_i if sql_type =~ /\((\d+)(,\s?\d+)?\)/
- end
+ def extract_precision(sql_type)
+ $1.to_i if sql_type =~ /\((\d+)(,\s?\d+)?\)/
+ end
- def initialize_type_map(m) # :nodoc:
- super
- register_class_with_limit m, %r(String), Type::String
- register_class_with_limit m, 'Date', Clickhouse::OID::Date
- register_class_with_limit m, 'DateTime', Clickhouse::OID::DateTime
-
- register_class_with_limit m, %r(Int8), Type::Integer
- register_class_with_limit m, %r(Int16), Type::Integer
- register_class_with_limit m, %r(Int32), Type::Integer
- register_class_with_limit m, %r(Int64), Type::Integer
- register_class_with_limit m, %r(Int128), Type::Integer
- register_class_with_limit m, %r(Int256), Type::Integer
-
- register_class_with_limit m, %r(UInt8), Type::UnsignedInteger
- register_class_with_limit m, %r(UInt16), Type::UnsignedInteger
- register_class_with_limit m, %r(UInt32), Type::UnsignedInteger
- register_class_with_limit m, %r(UInt64), Type::UnsignedInteger
- #register_class_with_limit m, %r(UInt128), Type::UnsignedInteger #not implemnted in clickhouse
- register_class_with_limit m, %r(UInt256), Type::UnsignedInteger
- # register_class_with_limit m, %r(Array), Clickhouse::OID::Array
- m.register_type(%r(Array)) do |sql_type|
- Clickhouse::OID::Array.new(sql_type)
+ def initialize_type_map(m) # :nodoc:
+ super
+ register_class_with_limit m, %r(String), Type::String
+ register_class_with_limit m, 'Date', Clickhouse::OID::Date
+ register_class_with_precision m, %r(datetime)i, Clickhouse::OID::DateTime
+
+ register_class_with_limit m, %r(Int8), Type::Integer
+ register_class_with_limit m, %r(Int16), Type::Integer
+ register_class_with_limit m, %r(Int32), Type::Integer
+ register_class_with_limit m, %r(Int64), Type::Integer
+ register_class_with_limit m, %r(Int128), Type::Integer
+ register_class_with_limit m, %r(Int256), Type::Integer
+
+ register_class_with_limit m, %r(UInt8), Type::UnsignedInteger
+ register_class_with_limit m, %r(UInt16), Type::UnsignedInteger
+ register_class_with_limit m, %r(UInt32), Type::UnsignedInteger
+ register_class_with_limit m, %r(UInt64), Type::UnsignedInteger
+ #register_class_with_limit m, %r(UInt128), Type::UnsignedInteger # not implemented in ClickHouse
+ register_class_with_limit m, %r(UInt256), Type::UnsignedInteger
+
+ m.register_type %r(bool)i, ActiveModel::Type::Boolean.new
+ m.register_type %r{uuid}i, Clickhouse::OID::Uuid.new
+ # register_class_with_limit m, %r(Array), Clickhouse::OID::Array
+ m.register_type(%r(Array)) do |sql_type|
+ Clickhouse::OID::Array.new(sql_type)
+ end
+
+ m.register_type(%r(Map)) do |sql_type|
+ Clickhouse::OID::Map.new(sql_type)
+ end
end
end
- def _quote(value)
+ # Rails 7 uses the TYPE_MAP constant, so we need to redefine this method
+ def type_map
+ @type_map ||= Type::TypeMap.new.tap { |m| ClickhouseAdapter.initialize_type_map(m) }
+ end
+
+ def quote(value)
case value
when Array
- '[' + value.map { |v| _quote(v) }.join(', ') + ']'
+ '[' + value.map { |v| quote(v) }.join(', ') + ']'
+ when Hash
+ '{' + value.map { |k, v| "#{quote(k)}: #{quote(v)}" }.join(', ') + '}'
else
super
end
@@ -224,30 +264,18 @@ def _quote(value)
# Quoting time without microseconds
def quoted_date(value)
if value.acts_like?(:time)
- if ActiveRecord::version >= Gem::Version.new('7')
- zone_conversion_method = ActiveRecord.default_timezone == :utc ? :getutc : :getlocal
- else
- zone_conversion_method = ActiveRecord::Base.default_timezone == :utc ? :getutc : :getlocal
- end
+ zone_conversion_method = ActiveRecord.default_timezone == :utc ? :getutc : :getlocal
if value.respond_to?(zone_conversion_method)
value = value.send(zone_conversion_method)
end
end
- if ActiveRecord::version >= Gem::Version.new('7')
- value.to_fs(:db)
- else
- value.to_s(:db)
- end
+ value.to_fs(:db)
end
def column_name_for_operation(operation, node) # :nodoc:
- if ActiveRecord::version >= Gem::Version.new('6')
- visitor.compile(node)
- else
- column_name_from_arel_node(node)
- end
+ visitor.compile(node)
end
# Executes insert +sql+ statement in the context of this connection using
@@ -256,10 +284,15 @@ def column_name_for_operation(operation, node) # :nodoc:
# SCHEMA STATEMENTS ========================================
- def primary_key(table_name) #:nodoc:
+ def primary_keys(table_name)
+ if server_version.to_f >= 23.4
+ structure = do_system_execute("SHOW COLUMNS FROM `#{table_name}`")
+ return structure['data'].select {|m| m[3]&.include?('PRI') }.pluck(0)
+ end
+
pk = table_structure(table_name).first
- return 'id' if pk.present? && pk[0] == 'id'
- false
+ return ['id'] if pk.present? && pk[0] == 'id'
+ []
end
def create_schema_dumper(options) # :nodoc:
@@ -267,21 +300,23 @@ def create_schema_dumper(options) # :nodoc:
end
# @param [String] table
+ # @option [Boolean] single_line
# @return [String]
- def show_create_table(table)
- do_system_execute("SHOW CREATE TABLE `#{table}`")['data'].try(:first).try(:first).gsub(/[\n\s]+/m, ' ')
+ def show_create_table(table, single_line: true)
+ sql = do_system_execute("SHOW CREATE TABLE `#{table}`")['data'].try(:first).try(:first)
+ single_line ? sql.squish : sql
end
# Create a new ClickHouse database.
def create_database(name)
sql = apply_cluster "CREATE DATABASE #{quote_table_name(name)}"
log_with_debug(sql, adapter_name) do
- res = @connection.post("/?#{@config.except(:database).to_param}", sql)
- process_response(res)
+ res = @connection.post("/?#{@connection_config.except(:database).to_param}", sql)
+ process_response(res, DEFAULT_RESPONSE_FORMAT)
end
end
- def create_view(table_name, **options)
+ def create_view(table_name, request_settings: {}, **options)
options.merge!(view: true)
options = apply_replica(table_name, options)
td = create_table_definition(apply_cluster(table_name), **options)
@@ -291,20 +326,24 @@ def create_view(table_name, **options)
drop_table(table_name, options.merge(if_exists: true))
end
- do_execute(schema_creation.accept(td), format: nil)
+ do_execute(schema_creation.accept(td), format: nil, settings: request_settings)
end
- def create_table(table_name, **options, &block)
+ def create_table(table_name, request_settings: {}, **options, &block)
options = apply_replica(table_name, **options)
td = create_table_definition(apply_cluster(table_name), **options)
block.call td if block_given?
- td.column(:id, options[:id], null: false) if options[:id].present? && td[:id].blank?
+ # Support old migrations: Rails 5.0 passed id: :integer, while 7.1 passes no :id option at all
+ # TODO: remove the automatic id column in the future
+ if (!options.key?(:id) || options[:id].present? && options[:id] != false) && td[:id].blank? && options[:as].blank?
+ td.column(:id, options[:id] || :integer, null: false)
+ end
if options[:force]
drop_table(table_name, options.merge(if_exists: true))
end
- do_execute(schema_creation.accept(td), format: nil)
+ do_execute(schema_creation.accept(td), format: nil, settings: request_settings)
if options[:with_distributed]
distributed_table_name = options.delete(:with_distributed)
@@ -312,17 +351,28 @@ def create_table(table_name, **options, &block)
raise 'Set a cluster' unless cluster
distributed_options =
- "Distributed(#{cluster}, #{@config[:database]}, #{table_name}, #{sharding_key})"
+ "Distributed(#{cluster}, #{@connection_config[:database]}, #{table_name}, #{sharding_key})"
create_table(distributed_table_name, **options.merge(options: distributed_options), &block)
end
end
+ def create_function(name, body, **options)
+ fd = "CREATE#{' OR REPLACE' if options[:force]} FUNCTION #{apply_cluster(quote_table_name(name))} AS #{body}"
+ do_execute(fd, format: nil)
+ end
+
# Drops a ClickHouse database.
def drop_database(name) #:nodoc:
sql = apply_cluster "DROP DATABASE IF EXISTS #{quote_table_name(name)}"
log_with_debug(sql, adapter_name) do
- res = @connection.post("/?#{@config.except(:database).to_param}", sql)
- process_response(res)
+ res = @connection.post("/?#{@connection_config.except(:database).to_param}", sql)
+ process_response(res, DEFAULT_RESPONSE_FORMAT)
+ end
+ end
+
+ def drop_functions
+ functions.each do |function|
+ drop_function(function)
end
end
@@ -331,7 +381,13 @@ def rename_table(table_name, new_name)
end
def drop_table(table_name, **options) # :nodoc:
- do_execute apply_cluster "DROP TABLE#{' IF EXISTS' if options[:if_exists]} #{quote_table_name(table_name)}"
+ query = "DROP TABLE"
+ query = "#{query} IF EXISTS " if options[:if_exists]
+ query = "#{query} #{quote_table_name(table_name)}"
+ query = apply_cluster(query)
+ query = "#{query} SYNC" if options[:sync]
+
+ do_execute(query)
if options[:with_distributed]
distributed_table_name = options.delete(:with_distributed)
@@ -339,6 +395,16 @@ def drop_table(table_name, **options) # :nodoc:
end
end
+ def drop_function(name, options = {})
+ query = "DROP FUNCTION"
+ query = "#{query} IF EXISTS " if options[:if_exists]
+ query = "#{query} #{quote_table_name(name)}"
+ query = apply_cluster(query)
+ query = "#{query} SYNC" if options[:sync]
+
+ do_execute(query, format: nil)
+ end
+
def add_column(table_name, column_name, type, **options)
return if options[:if_not_exists] == true && column_exists?(table_name, column_name, type)
@@ -368,16 +434,53 @@ def change_column_default(table_name, column_name, default)
change_column table_name, column_name, nil, {default: default}.compact
end
+ # Adds index description to tables metadata
+ # @link https://clickhouse.com/docs/en/sql-reference/statements/alter/skipping-index
+ def add_index(table_name, expression, **options)
+ index = add_index_options(apply_cluster(table_name), expression, **options)
+ execute schema_creation.accept(CreateIndexDefinition.new(index))
+ end
+
+ # Removes index description from tables metadata and deletes index files from disk
+ def remove_index(table_name, name)
+ query = apply_cluster("ALTER TABLE #{quote_table_name(table_name)}")
+ execute "#{query} DROP INDEX #{quote_column_name(name)}"
+ end
+
+ # Rebuilds the secondary index name for the specified partition_name
+ def rebuild_index(table_name, name, if_exists: false, partition: nil)
+ query = [apply_cluster("ALTER TABLE #{quote_table_name(table_name)}")]
+ query << 'MATERIALIZE INDEX'
+ query << 'IF EXISTS' if if_exists
+ query << quote_column_name(name)
+ query << "IN PARTITION #{quote_column_name(partition)}" if partition
+ execute query.join(' ')
+ end
+
+ # Deletes the secondary index files from disk without removing description
+ def clear_index(table_name, name, if_exists: false, partition: nil)
+ query = [apply_cluster("ALTER TABLE #{quote_table_name(table_name)}")]
+ query << 'CLEAR INDEX'
+ query << 'IF EXISTS' if if_exists
+ query << quote_column_name(name)
+ query << "IN PARTITION #{quote_column_name(partition)}" if partition
+ execute query.join(' ')
+ end
+
def cluster
- @full_config[:cluster_name]
+ @config[:cluster_name]
end
def replica
- @full_config[:replica_name]
+ @config[:replica_name]
+ end
+
+ def database
+ @config[:database]
end
def use_default_replicated_merge_tree_params?
- database_engine_atomic? && @full_config[:use_default_replicated_merge_tree_params]
+ database_engine_atomic? && @config[:use_default_replicated_merge_tree_params]
end
def use_replica?
@@ -385,11 +488,11 @@ def use_replica?
end
def replica_path(table)
- "/clickhouse/tables/#{cluster}/#{@config[:database]}.#{table}"
+ "/clickhouse/tables/#{cluster}/#{@connection_config[:database]}.#{table}"
end
def database_engine_atomic?
- current_database_engine = "select engine from system.databases where name = '#{@config[:database]}'"
+ current_database_engine = "select engine from system.databases where name = '#{@connection_config[:database]}'"
res = select_one(current_database_engine)
res['engine'] == 'Atomic' if res
end
@@ -444,6 +547,10 @@ def connect
@connection
end
+ def reconnect
+ connect
+ end
+
def apply_replica(table, **options)
if use_replica? && options[:options]
if options[:options].match(/^Replicated/)
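
For orientation, a migration exercising the adapter methods added above might look like the sketch below. It is illustrative only: the table, column, index and function names are hypothetical, and the index options follow the shape the schema dumper in this PR emits (name/type/granularity) rather than a separately documented contract.

class CreateVisits < ActiveRecord::Migration[7.1]
  def up
    # request_settings are forwarded to do_execute as per-query ClickHouse settings
    create_table :visits,
                 options: 'MergeTree ORDER BY (visited_at)',
                 request_settings: { flatten_nested: 0 } do |t|
      t.string :page_path, null: false
      t.datetime :visited_at, null: false
    end

    # Data-skipping index: added to the table metadata, then materialized on disk
    add_index :visits, 'page_path', name: 'idx_page_path', type: 'set(100)', granularity: 4
    rebuild_index :visits, 'idx_page_path', if_exists: true

    # User-defined SQL function; force: true renders CREATE OR REPLACE FUNCTION
    create_function :plus_one, '(x) -> x + 1', force: true
  end

  def down
    drop_function :plus_one, if_exists: true
    drop_table :visits, if_exists: true, sync: true
  end
end
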
diff --git a/lib/arel/nodes/final.rb b/lib/arel/nodes/final.rb
new file mode 100644
index 00000000..664a859f
--- /dev/null
+++ b/lib/arel/nodes/final.rb
@@ -0,0 +1,7 @@
+module Arel # :nodoc: all
+ module Nodes
+ class Final < Arel::Nodes::Unary
+ delegate :empty?, to: :expr
+ end
+ end
+end
diff --git a/lib/arel/nodes/grouping_sets.rb b/lib/arel/nodes/grouping_sets.rb
new file mode 100644
index 00000000..631794eb
--- /dev/null
+++ b/lib/arel/nodes/grouping_sets.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+module Arel # :nodoc: all
+ module Nodes
+ class GroupingSets < Arel::Nodes::Unary
+
+ def initialize(expr)
+ super
+ @expr = wrap_grouping_sets(expr)
+ end
+
+ private
+
+ def wrap_grouping_sets(sets)
+ sets.map do |element|
+ # See Arel::SelectManager#group
+ case element
+ when Array
+ wrap_grouping_sets(element)
+ when String
+ ::Arel::Nodes::SqlLiteral.new(element)
+ when Symbol
+ ::Arel::Nodes::SqlLiteral.new(element.to_s)
+ else
+ element
+ end
+ end
+ end
+
+ end
+ end
+end
\ No newline at end of file
diff --git a/lib/arel/nodes/limit_by.rb b/lib/arel/nodes/limit_by.rb
new file mode 100644
index 00000000..6901f2da
--- /dev/null
+++ b/lib/arel/nodes/limit_by.rb
@@ -0,0 +1,17 @@
+module Arel # :nodoc: all
+ module Nodes
+ class LimitBy < Arel::Nodes::Unary
+ attr_reader :column
+
+ def initialize(limit, column)
+ raise ArgumentError, 'Limit should be an integer' unless limit.is_a?(Integer)
+ raise ArgumentError, 'Limit should be a positive integer' unless limit >= 0
+ raise ArgumentError, 'Column should be a Symbol or String' unless column.is_a?(String) || column.is_a?(Symbol)
+
+ @column = column
+
+ super(limit)
+ end
+ end
+ end
+end
diff --git a/lib/arel/nodes/settings.rb b/lib/arel/nodes/settings.rb
new file mode 100644
index 00000000..deda588f
--- /dev/null
+++ b/lib/arel/nodes/settings.rb
@@ -0,0 +1,11 @@
+module Arel # :nodoc: all
+ module Nodes
+ class Settings < Arel::Nodes::Unary
+ def initialize(expr)
+ raise ArgumentError, 'Settings must be a Hash' unless expr.is_a?(Hash)
+
+ super
+ end
+ end
+ end
+end
diff --git a/lib/arel/nodes/using.rb b/lib/arel/nodes/using.rb
new file mode 100644
index 00000000..a3ced8dd
--- /dev/null
+++ b/lib/arel/nodes/using.rb
@@ -0,0 +1,6 @@
+module Arel # :nodoc: all
+ module Nodes
+ class Using < Arel::Nodes::Unary
+ end
+ end
+end
diff --git a/lib/arel/visitors/clickhouse.rb b/lib/arel/visitors/clickhouse.rb
new file mode 100644
index 00000000..eb369503
--- /dev/null
+++ b/lib/arel/visitors/clickhouse.rb
@@ -0,0 +1,140 @@
+require 'arel/visitors/to_sql'
+
+module Arel
+ module Visitors
+ class Clickhouse < ::Arel::Visitors::ToSql
+
+ def compile(node, collector = Arel::Collectors::SQLString.new)
+ @delete_or_update = false
+ super
+ end
+
+ def aggregate(name, o, collector)
+ # use the -Merge aggregate combinator (e.g. sum -> sumMerge) when aggregating over a materialized view
+ if o.expressions.first && o.expressions.first != '*' && !o.expressions.first.is_a?(String) && o.expressions.first.relation&.is_view
+ super("#{name.downcase}Merge", o, collector)
+ else
+ super
+ end
+ end
+
+ # https://clickhouse.com/docs/en/sql-reference/statements/delete
+ # DELETE and UPDATE in ClickHouse only work when column names are not prefixed with the table name
+ def visit_Arel_Attributes_Attribute(o, collector)
+ unless @delete_or_update
+ join_name = o.relation.table_alias || o.relation.name
+ collector << quote_table_name(join_name) << '.'
+ end
+ collector << quote_column_name(o.name)
+ end
+
+ def visit_Arel_Nodes_SelectOptions(o, collector)
+ maybe_visit o.limit_by, collector
+ maybe_visit o.settings, super
+ end
+
+ def visit_Arel_Nodes_UpdateStatement(o, collector)
+ @delete_or_update = true
+ o = prepare_update_statement(o)
+
+ collector << 'ALTER TABLE '
+ collector = visit o.relation, collector
+ collect_nodes_for o.values, collector, ' UPDATE '
+ collect_nodes_for o.wheres, collector, ' WHERE ', ' AND '
+ collect_nodes_for o.orders, collector, ' ORDER BY '
+ maybe_visit o.limit, collector
+ end
+
+ def visit_Arel_Nodes_DeleteStatement(o, collector)
+ @delete_or_update = true
+ super
+ end
+
+ def visit_Arel_Nodes_Final(o, collector)
+ visit o.expr, collector
+ collector << ' FINAL'
+ collector
+ end
+
+ def visit_Arel_Nodes_GroupingSets(o, collector)
+ collector << 'GROUPING SETS '
+ grouping_array_or_grouping_element(o.expr, collector)
+ end
+
+ def visit_Arel_Nodes_Settings(o, collector)
+ return collector if o.expr.empty?
+
+ collector << "SETTINGS "
+ o.expr.each_with_index do |(key, value), i|
+ collector << ", " if i > 0
+ collector << key.to_s.gsub(/\W+/, "")
+ collector << " = "
+ collector << sanitize_as_setting_value(value)
+ end
+ collector
+ end
+
+ def visit_Arel_Nodes_Using(o, collector)
+ collector << "USING "
+ visit o.expr, collector
+ collector
+ end
+
+ def visit_Arel_Nodes_LimitBy(o, collector)
+ collector << "LIMIT #{o.expr} BY #{o.column}"
+ collector
+ end
+
+ def visit_Arel_Nodes_Matches(o, collector)
+ op = o.case_sensitive ? " LIKE " : " ILIKE "
+ infix_value o, collector, op
+ end
+
+ def visit_Arel_Nodes_DoesNotMatch(o, collector)
+ op = o.case_sensitive ? " NOT LIKE " : " NOT ILIKE "
+ infix_value o, collector, op
+ end
+
+ def visit_Arel_Nodes_Rows(o, collector)
+ if o.expr.is_a?(String)
+ collector << "ROWS #{o.expr}"
+ else
+ super
+ end
+ end
+
+ def sanitize_as_setting_value(value)
+ if value == :default
+ 'DEFAULT'
+ else
+ quote(value)
+ end
+ end
+
+ def sanitize_as_setting_name(value)
+ return value if Arel::Nodes::SqlLiteral === value
+ @connection.sanitize_as_setting_name(value)
+ end
+
+ private
+
+ # Utilized by GroupingSet, Cube & RollUp visitors to
+ # handle grouping aggregation semantics
+ def grouping_array_or_grouping_element(o, collector)
+ if o.is_a? Array
+ collector << '( '
+ o.each_with_index do |el, i|
+ collector << ', ' if i > 0
+ grouping_array_or_grouping_element el, collector
+ end
+ collector << ' )'
+ elsif o.respond_to? :expr
+ visit o.expr, collector
+ else
+ visit o, collector
+ end
+ end
+
+ end
+ end
+end
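
To make the two mutation paths above concrete: assuming a hypothetical Visit model on a ClickHouse connection and that the adapter accepts bulk mutations, the visitor renders them roughly as follows (identifier quoting omitted).

Visit.where(page_path: '/old').update_all(page_path: '/new')
# ALTER TABLE visits UPDATE page_path = '/new' WHERE page_path = '/old'

Visit.where(page_path: '/old').delete_all
# DELETE FROM visits WHERE page_path = '/old'
# In both cases @delete_or_update suppresses the "visits." prefix on column names.
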
diff --git a/lib/clickhouse-activerecord.rb b/lib/clickhouse-activerecord.rb
index e7a64800..40ce7ef7 100644
--- a/lib/clickhouse-activerecord.rb
+++ b/lib/clickhouse-activerecord.rb
@@ -2,10 +2,14 @@
require 'active_record/connection_adapters/clickhouse_adapter'
+require 'core_extensions/active_record/internal_metadata'
require 'core_extensions/active_record/relation'
-
-require_relative '../core_extensions/active_record/migration/command_recorder'
-ActiveRecord::Migration::CommandRecorder.include CoreExtensions::ActiveRecord::Migration::CommandRecorder
+require 'core_extensions/active_record/schema_migration'
+require 'core_extensions/active_record/migration/command_recorder'
+require 'core_extensions/arel/nodes/select_core'
+require 'core_extensions/arel/nodes/select_statement'
+require 'core_extensions/arel/select_manager'
+require 'core_extensions/arel/table'
if defined?(Rails::Railtie)
require 'clickhouse-activerecord/railtie'
@@ -17,6 +21,14 @@
module ClickhouseActiverecord
def self.load
+ ActiveRecord::InternalMetadata.prepend(CoreExtensions::ActiveRecord::InternalMetadata)
+ ActiveRecord::Migration::CommandRecorder.include(CoreExtensions::ActiveRecord::Migration::CommandRecorder)
ActiveRecord::Relation.prepend(CoreExtensions::ActiveRecord::Relation)
+ ActiveRecord::SchemaMigration.prepend(CoreExtensions::ActiveRecord::SchemaMigration)
+
+ Arel::Nodes::SelectCore.prepend(CoreExtensions::Arel::Nodes::SelectCore)
+ Arel::Nodes::SelectStatement.prepend(CoreExtensions::Arel::Nodes::SelectStatement)
+ Arel::SelectManager.prepend(CoreExtensions::Arel::SelectManager)
+ Arel::Table.prepend(CoreExtensions::Arel::Table)
end
end
diff --git a/lib/clickhouse-activerecord/arel/visitors/to_sql.rb b/lib/clickhouse-activerecord/arel/visitors/to_sql.rb
deleted file mode 100644
index 4b5648ae..00000000
--- a/lib/clickhouse-activerecord/arel/visitors/to_sql.rb
+++ /dev/null
@@ -1,20 +0,0 @@
-require 'arel/visitors/to_sql'
-
-module ClickhouseActiverecord
- module Arel
- module Visitors
- class ToSql < ::Arel::Visitors::ToSql
-
- def aggregate(name, o, collector)
- # replacing function name for materialized view
- if o.expressions.first && o.expressions.first != '*' && !o.expressions.first.is_a?(String) && o.expressions.first.relation&.is_view
- super("#{name.downcase}Merge", o, collector)
- else
- super
- end
- end
-
- end
- end
- end
-end
diff --git a/lib/clickhouse-activerecord/migration.rb b/lib/clickhouse-activerecord/migration.rb
deleted file mode 100644
index 18c25cce..00000000
--- a/lib/clickhouse-activerecord/migration.rb
+++ /dev/null
@@ -1,126 +0,0 @@
-require 'active_record/migration'
-
-module ClickhouseActiverecord
-
- class SchemaMigration < ::ActiveRecord::SchemaMigration
- class << self
-
- def create_table
- return if table_exists?
-
- version_options = connection.internal_string_options_for_primary_key
- table_options = {
- id: false, options: 'ReplacingMergeTree(ver) ORDER BY (version)', if_not_exists: true
- }
- full_config = connection.instance_variable_get(:@full_config) || {}
-
- if full_config[:distributed_service_tables]
- table_options.merge!(with_distributed: table_name, sharding_key: 'cityHash64(version)')
-
- distributed_suffix = "_#{full_config[:distributed_service_tables_suffix] || 'distributed'}"
- end
-
- connection.create_table(table_name + distributed_suffix.to_s, **table_options) do |t|
- t.string :version, **version_options
- t.column :active, 'Int8', null: false, default: '1'
- t.datetime :ver, null: false, default: -> { 'now()' }
- end
- end
-
- def all_versions
- from("#{table_name} FINAL").where(active: 1).order(:version).pluck(:version)
- end
- end
- end
-
- class InternalMetadata < ::ActiveRecord::InternalMetadata
- class << self
- def create_table
- return if table_exists?
-
- key_options = connection.internal_string_options_for_primary_key
- table_options = {
- id: false,
- options: connection.adapter_name.downcase == 'clickhouse' ? 'MergeTree() PARTITION BY toDate(created_at) ORDER BY (created_at)' : '',
- if_not_exists: true
- }
- full_config = connection.instance_variable_get(:@full_config) || {}
-
- if full_config[:distributed_service_tables]
- table_options.merge!(with_distributed: table_name, sharding_key: 'cityHash64(created_at)')
-
- distributed_suffix = "_#{full_config[:distributed_service_tables_suffix] || 'distributed'}"
- end
-
- connection.create_table(table_name + distributed_suffix.to_s, **table_options) do |t|
- t.string :key, **key_options
- t.string :value
- t.timestamps
- end
- end
- end
- end
-
- class MigrationContext < ::ActiveRecord::MigrationContext #:nodoc:
- attr_reader :migrations_paths, :schema_migration
-
- def initialize(migrations_paths, schema_migration)
- @migrations_paths = migrations_paths
- @schema_migration = schema_migration
- end
-
- def up(target_version = nil)
- selected_migrations = if block_given?
- migrations.select { |m| yield m }
- else
- migrations
- end
-
- ClickhouseActiverecord::Migrator.new(:up, selected_migrations, schema_migration, target_version).migrate
- end
-
- def down(target_version = nil)
- selected_migrations = if block_given?
- migrations.select { |m| yield m }
- else
- migrations
- end
-
- ClickhouseActiverecord::Migrator.new(:down, selected_migrations, schema_migration, target_version).migrate
- end
-
- def get_all_versions
- if schema_migration.table_exists?
- schema_migration.all_versions.map(&:to_i)
- else
- []
- end
- end
-
- end
-
- class Migrator < ::ActiveRecord::Migrator
-
- def initialize(direction, migrations, schema_migration, target_version = nil)
- @direction = direction
- @target_version = target_version
- @migrated_versions = nil
- @migrations = migrations
- @schema_migration = schema_migration
-
- validate(@migrations)
-
- @schema_migration.create_table
- ClickhouseActiverecord::InternalMetadata.create_table
- end
-
- def record_version_state_after_migrating(version)
- if down?
- migrated.delete(version)
- @schema_migration.create!(version: version.to_s, active: 0)
- else
- super
- end
- end
- end
-end
diff --git a/lib/clickhouse-activerecord/rspec.rb b/lib/clickhouse-activerecord/rspec.rb
new file mode 100644
index 00000000..dbcf4a0a
--- /dev/null
+++ b/lib/clickhouse-activerecord/rspec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+RSpec.configure do |config|
+ config.before do
+ ActiveRecord::Base.configurations.configurations.select { |x| x.env_name == Rails.env && x.adapter == 'clickhouse' }.each do |config|
+ ActiveRecord::Base.establish_connection(config)
+ ActiveRecord::Base.connection.tables.each do |table|
+ ActiveRecord::Base.connection.execute("TRUNCATE TABLE #{table}")
+ end
+ end
+ end
+end
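
A host application would opt into this helper from its spec setup; the sketch below assumes a standard RSpec + Rails layout.

# spec/rails_helper.rb (hypothetical host application)
require 'clickhouse-activerecord/rspec'
# Before each example, every table of every ClickHouse configuration for the
# current Rails.env is truncated, so examples always start from empty tables.
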
diff --git a/lib/clickhouse-activerecord/schema.rb b/lib/clickhouse-activerecord/schema.rb
index 0a30da4e..cd674cac 100644
--- a/lib/clickhouse-activerecord/schema.rb
+++ b/lib/clickhouse-activerecord/schema.rb
@@ -1,19 +1,13 @@
# frozen_string_literal: true
module ClickhouseActiverecord
-
class Schema < ::ActiveRecord::Schema
-
- def define(info, &block) # :nodoc:
- instance_eval(&block)
-
- if info[:version].present?
- connection.schema_migration.create_table
- connection.assume_migrated_upto_version(info[:version], ClickhouseActiverecord::Migrator.migrations_paths)
- end
-
- ClickhouseActiverecord::InternalMetadata.create_table
- ClickhouseActiverecord::InternalMetadata[:environment] = connection.migration_context.current_environment
+ def define(...)
+ ActiveRecord.deprecator.warn(<<~MSG)
+ ClickhouseActiverecord::Schema is deprecated
+ and will be removed in 1.2 version. Use ActiveRecord::Schema instead.
+ MSG
+ super
end
end
end
diff --git a/lib/clickhouse-activerecord/schema_dumper.rb b/lib/clickhouse-activerecord/schema_dumper.rb
index 94981c6f..f244de47 100644
--- a/lib/clickhouse-activerecord/schema_dumper.rb
+++ b/lib/clickhouse-activerecord/schema_dumper.rb
@@ -14,29 +14,17 @@ def dump(connection = ActiveRecord::Base.connection, stream = STDOUT, config = A
private
- def header(stream)
- stream.puts <<HEADER
-# (auto-generated schema header comment elided)
-HEADER
- end
def tables(stream)
- sorted_tables = @connection.tables.sort { |a, b| a <=> b }
+ functions = @connection.functions.sort
+ functions.each do |function|
+ function(function, stream)
+ end
- sorted_tables.each do |table_name|
+ view_tables = @connection.views.sort
+ materialized_view_tables = @connection.materialized_views.sort
+ sorted_tables = @connection.tables.sort - view_tables - materialized_view_tables
+
+ (sorted_tables + view_tables + materialized_view_tables).each do |table_name|
table(table_name, stream) unless ignored?(table_name)
end
end
@@ -50,7 +38,7 @@ def table(table, stream)
# super(table.gsub(/^\.inner\./, ''), stream)
# detect view table
- match = sql.match(/^CREATE\s+(MATERIALIZED\s+)?VIEW/)
+ view_match = sql.match(/^CREATE\s+(MATERIALIZED\s+)?VIEW\s+\S+\s+(?:TO (\S+))?/)
end
# Copy from original dumper
@@ -65,20 +53,16 @@ def table(table, stream)
unless simple
# Add materialize flag
- tbl.print ', view: true' if match
- tbl.print ', materialized: true' if match && match[1].presence
+ tbl.print ', view: true' if view_match
+ tbl.print ', materialized: true' if view_match && view_match[1].presence
+ tbl.print ", to: \"#{view_match[2]}\"" if view_match && view_match[2].presence
end
- case pk
- when String
- tbl.print ", primary_key: #{pk.inspect}" unless pk == "id"
- pkcol = columns.detect { |c| c.name == pk }
- pkcolspec = column_spec_for_primary_key(pkcol)
- if pkcolspec.present?
- tbl.print ", #{format_colspec(pkcolspec)}"
+ if (id = columns.detect { |c| c.name == 'id' })
+ spec = column_spec_for_primary_key(id)
+ if spec.present?
+ tbl.print ", #{format_colspec(spec)}"
end
- when Array
- tbl.print ", primary_key: #{pk.inspect}"
else
tbl.print ", id: false"
end
@@ -86,25 +70,34 @@ def table(table, stream)
unless simple
table_options = @connection.table_options(table)
if table_options.present?
- tbl.print ", #{format_options(table_options)}"
+ table_options = format_options(table_options)
+ table_options.gsub!(/Buffer\('[^']+'/, 'Buffer(\'#{connection.database}\'')
+ tbl.print ", #{table_options}"
end
end
tbl.puts ", force: :cascade do |t|"
# then dump all non-primary key columns
- if simple || !match
+ if simple || !view_match
columns.each do |column|
raise StandardError, "Unknown type '#{column.sql_type}' for column '#{column.name}'" unless @connection.valid_type?(column.type)
- next if column.name == pk
+ next if column.name == pk && column.name == "id"
type, colspec = column_spec(column)
- tbl.print " t.#{type} #{column.name.inspect}"
+ name = column.name =~ (/\./) ? "\"`#{column.name}`\"" : column.name.inspect
+ tbl.print " t.#{type} #{name}"
tbl.print ", #{format_colspec(colspec)}" if colspec.present?
tbl.puts
end
end
- indexes_in_create(table, tbl)
+ indexes = sql.scan(/INDEX \S+ \S+ TYPE .*? GRANULARITY \d+/)
+ if indexes.any?
+ tbl.puts ''
+ indexes.flatten.map!(&:strip).each do |index|
+ tbl.puts " t.index #{index_parts(index).join(', ')}"
+ end
+ end
tbl.puts " end"
tbl.puts
@@ -119,9 +112,28 @@ def table(table, stream)
end
end
+ def column_spec_for_primary_key(column)
+ spec = super
+
+ id = ActiveRecord::ConnectionAdapters::ClickhouseAdapter::NATIVE_DATABASE_TYPES.invert[{name: column.sql_type.gsub(/\(\d+\)/, "")}]
+ spec[:id] = id.inspect if id.present?
+
+ spec.except!(:limit, :unsigned) # This can be removed at some point; it is only here to clean up existing schemas that already dumped these values
+ end
+
+ def function(function, stream)
+ stream.puts " # FUNCTION: #{function}"
+ sql = @connection.show_create_function(function)
+ if sql
+ stream.puts " # SQL: #{sql}"
+ stream.puts " create_function \"#{function}\", \"#{sql.gsub(/^CREATE FUNCTION (.*?) AS/, '').strip}\", force: true"
+ stream.puts
+ end
+ end
+
def format_options(options)
if options && options[:options]
- options[:options] = options[:options].gsub(/^Replicated(.*?)\('[^']+',\s*'[^']+',?\s?([^\)]*)?\)/, "\\1(\\2)")
+ options[:options].gsub!(/^Replicated(.*?)\('[^']+',\s*'[^']+',?\s?([^\)]*)?\)/, "\\1(\\2)")
end
super
end
@@ -145,14 +157,44 @@ def schema_unsigned(column)
end
def schema_array(column)
- (column.sql_type =~ /Array?\(/).nil? ? nil : true
+ (column.sql_type =~ /Array\(/).nil? ? nil : true
+ end
+
+ def schema_map(column)
+ if column.sql_type =~ /Map\(([^,]+),\s*(Array)\)/
+ return :array
+ end
+
+ (column.sql_type =~ /Map\(/).nil? ? nil : true
end
+ def schema_low_cardinality(column)
+ (column.sql_type =~ /LowCardinality\(/).nil? ? nil : true
+ end
+
+ # @param [ActiveRecord::ConnectionAdapters::Clickhouse::Column] column
def prepare_column_options(column)
spec = {}
spec[:unsigned] = schema_unsigned(column)
spec[:array] = schema_array(column)
+ spec[:map] = schema_map(column)
+ if spec[:map] == :array
+ spec[:array] = nil
+ end
+ spec[:low_cardinality] = schema_low_cardinality(column)
+ spec[:codec] = column.codec.inspect if column.codec
spec.merge(super).compact
end
+
+ def index_parts(index)
+ idx = index.match(/^INDEX (?<name>\S+) (?<expr>.*?) TYPE (?<type>.*?) GRANULARITY (?<granularity>\d+)$/)
+ index_parts = [
+ format_index_parts(idx['expr']),
+ "name: #{format_index_parts(idx['name'])}",
+ "type: #{format_index_parts(idx['type'])}",
+ ]
+ index_parts << "granularity: #{idx['granularity']}" if idx['granularity']
+ index_parts
+ end
end
end
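
For reference, with the changes above a dumped ClickHouse schema entry comes out shaped roughly like the following; the version, engine and all names are hypothetical, and the exact output depends on the table definitions.

ActiveRecord::Schema[7.1].define(version: 2024_01_01_000000) do
  # FUNCTION: plus_one
  # SQL: CREATE FUNCTION plus_one AS (x) -> (x + 1)
  create_function "plus_one", "(x) -> (x + 1)", force: true

  create_table "visits", id: false,
               options: "MergeTree ORDER BY (visited_at)", force: :cascade do |t|
    t.string "page_path", null: false
    t.datetime "visited_at", null: false

    t.index "page_path", name: "idx_page_path", type: "set(100)", granularity: 4
  end
end
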
diff --git a/lib/clickhouse-activerecord/tasks.rb b/lib/clickhouse-activerecord/tasks.rb
index f3e20033..a4a8a85c 100644
--- a/lib/clickhouse-activerecord/tasks.rb
+++ b/lib/clickhouse-activerecord/tasks.rb
@@ -2,16 +2,19 @@
module ClickhouseActiverecord
class Tasks
+ delegate :connection, :establish_connection, to: ActiveRecord::Base
- delegate :connection, :establish_connection, :clear_active_connections!, to: ActiveRecord::Base
+ def self.using_database_configurations?
+ true
+ end
def initialize(configuration)
- @configuration = configuration.with_indifferent_access
+ @configuration = configuration
end
def create
establish_master_connection
- connection.create_database @configuration["database"]
+ connection.create_database @configuration.database
rescue ActiveRecord::StatementInvalid => e
if e.cause.to_s.include?('already exists')
raise ActiveRecord::DatabaseAlreadyExists
@@ -22,22 +25,38 @@ def create
def drop
establish_master_connection
- connection.drop_database @configuration["database"]
+ connection.drop_database @configuration.database
end
def purge
- clear_active_connections!
+ ActiveRecord::Base.connection_handler.clear_active_connections!(:all)
drop
create
end
def structure_dump(*args)
- tables = connection.execute("SHOW TABLES FROM #{@configuration['database']}")['data'].flatten
+ establish_master_connection
+
+ # get all tables
+ tables = connection.execute("SHOW TABLES FROM #{@configuration.database} WHERE name NOT LIKE '.inner_id.%'")['data'].flatten.map do |table|
+ next if %w[schema_migrations ar_internal_metadata].include?(table)
+ connection.show_create_table(table, single_line: false).gsub("#{@configuration.database}.", '')
+ end.compact
+
+ # sort views to the end so they come after the tables they read from when the structure is loaded
+ tables.sort_by! {|table| table.match(/^CREATE\s+(MATERIALIZED\s+)?VIEW/) ? 1 : 0}
+ # get all functions
+ functions = connection.execute("SELECT create_query FROM system.functions WHERE origin = 'SQLUserDefined' ORDER BY name")['data'].flatten
+
+ # put to file
File.open(args.first, 'w:utf-8') do |file|
+ functions.each do |function|
+ file.puts function.gsub('\\n', "\n") + ";\n\n"
+ end
+
tables.each do |table|
- next if table.match(/\.inner/)
- file.puts connection.execute("SHOW CREATE TABLE #{table}")['data'].try(:first).try(:first).gsub("#{@configuration['database']}.", '') + ";\n\n"
+ file.puts table + ";\n\n"
end
end
end
@@ -48,6 +67,8 @@ def structure_load(*args)
next
elsif sql =~ /^INSERT INTO/
connection.do_execute(sql, nil, format: nil)
+ elsif sql =~ /^CREATE .*?FUNCTION/
+ connection.do_execute(sql, nil, format: nil)
else
connection.execute(sql)
end
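
The tasks are normally reached through the rake tasks further down, but they can also be driven directly; this sketch uses the same file name the rake tasks use.

config = ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'clickhouse')
tasks  = ClickhouseActiverecord::Tasks.new(config)

# Dumps user-defined functions first, then tables, with views sorted last
tasks.structure_dump(Rails.root.join('db/clickhouse_structure.sql'))

# Replays the dumped file statement by statement
tasks.structure_load(Rails.root.join('db/clickhouse_structure.sql'))
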
diff --git a/lib/clickhouse-activerecord/version.rb b/lib/clickhouse-activerecord/version.rb
index 5b33f3e4..4b9e83fb 100644
--- a/lib/clickhouse-activerecord/version.rb
+++ b/lib/clickhouse-activerecord/version.rb
@@ -1,3 +1,3 @@
module ClickhouseActiverecord
- VERSION = '0.5.14'
+ VERSION = '1.2.1'
end
diff --git a/lib/core_extensions/active_record/internal_metadata.rb b/lib/core_extensions/active_record/internal_metadata.rb
new file mode 100644
index 00000000..ba1272f4
--- /dev/null
+++ b/lib/core_extensions/active_record/internal_metadata.rb
@@ -0,0 +1,71 @@
+module CoreExtensions
+ module ActiveRecord
+ module InternalMetadata
+
+ def create_table
+ return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
+ return if !enabled? || table_exists?
+
+ key_options = connection.internal_string_options_for_primary_key
+ table_options = {
+ id: false,
+ options: 'ReplacingMergeTree(created_at) PARTITION BY key ORDER BY key',
+ if_not_exists: true
+ }
+ full_config = connection.instance_variable_get(:@config) || {}
+
+ if full_config[:distributed_service_tables]
+ table_options.merge!(with_distributed: table_name, sharding_key: 'cityHash64(created_at)')
+
+ distributed_suffix = "_#{full_config[:distributed_service_tables_suffix] || 'distributed'}"
+ else
+ distributed_suffix = ''
+ end
+
+ connection.create_table(table_name + distributed_suffix.to_s, **table_options) do |t|
+ t.string :key, **key_options
+ t.string :value
+ t.timestamps
+ end
+ end
+
+ private
+
+ def update_entry(connection_or_key, key_or_new_value, new_value = nil)
+ if ::ActiveRecord::version >= Gem::Version.new('7.2')
+ return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
+ create_entry(connection_or_key, key_or_new_value, new_value)
+ else
+ return super(connection_or_key, key_or_new_value) unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
+ create_entry(connection_or_key, key_or_new_value)
+ end
+ end
+
+ def select_entry(connection_or_key, key = nil)
+ if ::ActiveRecord::version >= Gem::Version.new('7.2')
+ return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
+ else
+ key = connection_or_key
+ return super(key) unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
+ end
+
+ sm = ::Arel::SelectManager.new(arel_table)
+ sm.final! if connection.table_options(table_name)[:options] =~ /^ReplacingMergeTree/
+ sm.project(::Arel.star)
+ sm.where(arel_table[primary_key].eq(::Arel::Nodes::BindParam.new(key)))
+ sm.order(arel_table[primary_key].asc)
+ sm.limit = 1
+
+ connection.select_one(sm, "#{self.class} Load")
+ end
+
+ def connection
+ if ::ActiveRecord::version >= Gem::Version.new('7.2')
+ @pool.lease_connection
+ else
+ super
+ end
+ end
+ end
+ end
+end
diff --git a/core_extensions/active_record/migration/command_recorder.rb b/lib/core_extensions/active_record/migration/command_recorder.rb
similarity index 100%
rename from core_extensions/active_record/migration/command_recorder.rb
rename to lib/core_extensions/active_record/migration/command_recorder.rb
diff --git a/lib/core_extensions/active_record/relation.rb b/lib/core_extensions/active_record/relation.rb
index f936005e..4d55f006 100644
--- a/lib/core_extensions/active_record/relation.rb
+++ b/lib/core_extensions/active_record/relation.rb
@@ -10,6 +10,143 @@ def reverse_order!
self.order_values = (column_names & %w[date created_at]).map { |c| arel_table[c].desc }
self
end
+
+ # Define settings in the SETTINGS clause of the SELECT query. The setting value is applied only to that query and is reset to the default or previous value after the query is executed.
+ # For example:
+ #
+ # users = User.settings(optimize_read_in_order: 1, cast_keep_nullable: 1).where(name: 'John')
+ # # SELECT users.* FROM users WHERE users.name = 'John' SETTINGS optimize_read_in_order = 1, cast_keep_nullable = 1
+ #
+ # An ActiveRecord::ActiveRecordError will be raised if the database is not ClickHouse.
+ # @param [Hash] opts
+ def settings(**opts)
+ spawn.settings!(**opts)
+ end
+
+ # @param [Hash] opts
+ def settings!(**opts)
+ check_command('SETTINGS')
+ @values[:settings] = (@values[:settings] || {}).merge opts
+ self
+ end
+
+ # When FINAL is specified, ClickHouse fully merges the data before returning the result and thus performs all data transformations that happen during merges for the given table engine.
+ # For example:
+ #
+ # users = User.final.all
+ # # SELECT users.* FROM users FINAL
+ #
+ # An ActiveRecord::ActiveRecordError will be raised if the database is not ClickHouse.
+ def final
+ spawn.final!
+ end
+
+ def final!
+ check_command('FINAL')
+ @values[:final] = true
+ self
+ end
+
+ # GROUPING SETS allows you to specify multiple groupings in the GROUP BY clause.
+ # Whereas GROUP BY CUBE generates all possible groupings, GROUP BY GROUPING SETS generates only the specified groupings.
+ # For example:
+ #
+ # users = User.group_by_grouping_sets([], [:name], [:name, :age]).select(:name, :age, 'count(*)')
+ # # SELECT name, age, count(*) FROM users GROUP BY GROUPING SETS ( (), (name), (name, age) )
+ #
+ # which is generally equivalent to:
+ # # SELECT NULL, NULL, count(*) FROM users
+ # # UNION ALL
+ # # SELECT name, NULL, count(*) FROM users GROUP BY name
+ # # UNION ALL
+ # # SELECT name, age, count(*) FROM users GROUP BY name, age
+ #
+ # Raises ArgumentError if no grouping sets are provided.
+ def group_by_grouping_sets(*grouping_sets)
+ raise ArgumentError, 'The method .group_by_grouping_sets() must contain arguments.' if grouping_sets.blank?
+
+ spawn.group_by_grouping_sets!(*grouping_sets)
+ end
+
+ def group_by_grouping_sets!(*grouping_sets) # :nodoc:
+ grouping_sets = grouping_sets.map { |set| arel_columns(set) }
+ self.group_values += [::Arel::Nodes::GroupingSets.new(grouping_sets)]
+ self
+ end
+
+ # The USING clause specifies one or more columns to join, which establishes the equality of these columns. For example:
+ #
+ # users = User.joins(:joins).using(:event_name, :date)
+ # # SELECT users.* FROM users INNER JOIN joins USING event_name,date
+ #
+ # An ActiveRecord::ActiveRecordError will be raised if the database is not ClickHouse.
+ # @param [Array] opts
+ def using(*opts)
+ spawn.using!(*opts)
+ end
+
+ # @param [Array] opts
+ def using!(*opts)
+ @values[:using] = opts
+ self
+ end
+
+ # Window functions let you perform calculations across a set of rows that are related to the current row. For example:
+ #
+ # users = User.window('x', order: 'date', partition: 'name', rows: 'UNBOUNDED PRECEDING').select('sum(value) OVER x')
+ # # SELECT sum(value) OVER x FROM users WINDOW x AS (PARTITION BY name ORDER BY date ROWS UNBOUNDED PRECEDING)
+ #
+ # @param [String] name
+ # @param [Hash] opts
+ def window(name, **opts)
+ spawn.window!(name, **opts)
+ end
+
+ def window!(name, **opts)
+ @values[:windows] = [] unless @values[:windows]
+ @values[:windows] << [name, opts]
+ self
+ end
+
+ # The LIMIT BY clause allows deduplication based on a unique key and typically performs better than
+ # the GROUP BY clause.
+ #
+ # users = User.limit_by(1, :id)
+ # # SELECT users.* FROM users LIMIT 1 BY id
+ #
+ # An ActiveRecord::ActiveRecordError will be raised if the database is not ClickHouse.
+ # @param [Array] opts
+ def limit_by(*opts)
+ spawn.limit_by!(*opts)
+ end
+
+ # @param [Array] opts
+ def limit_by!(*opts)
+ @values[:limit_by] = *opts
+ self
+ end
+
+ private
+
+ def check_command(cmd)
+ raise ::ActiveRecord::ActiveRecordError, cmd + ' is a ClickHouse specific query clause' unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
+ end
+
+ def build_arel(connection_or_aliases = nil, aliases = nil)
+ if ::ActiveRecord::version >= Gem::Version.new('7.2')
+ arel = super
+ else
+ arel = super(connection_or_aliases)
+ end
+
+ arel.final! if @values[:final].present?
+ arel.limit_by(*@values[:limit_by]) if @values[:limit_by].present?
+ arel.settings(@values[:settings]) if @values[:settings].present?
+ arel.using(@values[:using]) if @values[:using].present?
+ arel.windows(@values[:windows]) if @values[:windows].present?
+
+ arel
+ end
end
end
end
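
Putting the relation extensions together, assuming a hypothetical Visit model backed by ClickHouse (the SQL in the comments is approximate and identifier quoting is omitted):

scope = Visit.final
             .where(page_path: '/pricing')
             .limit_by(1, :visitor_id)
             .settings(optimize_read_in_order: 1)
scope.to_sql
# SELECT visits.* FROM visits FINAL WHERE visits.page_path = '/pricing'
#   LIMIT 1 BY visitor_id SETTINGS optimize_read_in_order = 1

Visit.group_by_grouping_sets([], [:page_path]).select(:page_path, 'count(*)').to_sql
# SELECT page_path, count(*) FROM visits GROUP BY GROUPING SETS ( (), (page_path) )
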
diff --git a/lib/core_extensions/active_record/schema_migration.rb b/lib/core_extensions/active_record/schema_migration.rb
new file mode 100644
index 00000000..bafb6ff1
--- /dev/null
+++ b/lib/core_extensions/active_record/schema_migration.rb
@@ -0,0 +1,60 @@
+module CoreExtensions
+ module ActiveRecord
+ module SchemaMigration
+
+ def create_table
+ return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
+
+ return if table_exists?
+
+ version_options = connection.internal_string_options_for_primary_key
+ table_options = {
+ id: false, options: 'ReplacingMergeTree(ver) ORDER BY (version)', if_not_exists: true
+ }
+ full_config = connection.instance_variable_get(:@config) || {}
+
+ if full_config[:distributed_service_tables]
+ table_options.merge!(with_distributed: table_name, sharding_key: 'cityHash64(version)')
+
+ distributed_suffix = "_#{full_config[:distributed_service_tables_suffix] || 'distributed'}"
+ else
+ distributed_suffix = ''
+ end
+
+ connection.create_table(table_name + distributed_suffix.to_s, **table_options) do |t|
+ t.string :version, **version_options
+ t.column :active, 'Int8', null: false, default: '1'
+ t.datetime :ver, null: false, default: -> { 'now()' }
+ end
+ end
+
+ def delete_version(version)
+ return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
+
+ im = ::Arel::InsertManager.new(arel_table)
+ im.insert(arel_table[primary_key] => version.to_s, arel_table['active'] => 0)
+ connection.insert(im, "#{self.class} Create Rollback Version", primary_key, version)
+ end
+
+ def versions
+ return super unless connection.is_a?(::ActiveRecord::ConnectionAdapters::ClickhouseAdapter)
+
+ sm = ::Arel::SelectManager.new(arel_table)
+ sm.final!
+ sm.project(arel_table[primary_key])
+ sm.order(arel_table[primary_key].asc)
+ sm.where([arel_table['active'].eq(1)])
+
+ connection.select_values(sm, "#{self.class} Load")
+ end
+
+ def connection
+ if ::ActiveRecord::version >= Gem::Version.new('7.2')
+ @pool.lease_connection
+ else
+ super
+ end
+ end
+ end
+ end
+end
diff --git a/lib/core_extensions/arel/nodes/select_core.rb b/lib/core_extensions/arel/nodes/select_core.rb
new file mode 100644
index 00000000..50be451c
--- /dev/null
+++ b/lib/core_extensions/arel/nodes/select_core.rb
@@ -0,0 +1,19 @@
+module CoreExtensions
+ module Arel # :nodoc: all
+ module Nodes
+ module SelectCore
+ attr_accessor :final
+
+ def source
+ return super unless final
+
+ ::Arel::Nodes::Final.new(super)
+ end
+
+ def eql?(other)
+ super && final == other.final
+ end
+ end
+ end
+ end
+end
diff --git a/lib/core_extensions/arel/nodes/select_statement.rb b/lib/core_extensions/arel/nodes/select_statement.rb
new file mode 100644
index 00000000..f5b2d62b
--- /dev/null
+++ b/lib/core_extensions/arel/nodes/select_statement.rb
@@ -0,0 +1,21 @@
+module CoreExtensions
+ module Arel # :nodoc: all
+ module Nodes
+ module SelectStatement
+ attr_accessor :limit_by, :settings
+
+ def initialize(relation = nil)
+ super
+ @limit_by = nil
+ @settings = nil
+ end
+
+ def eql?(other)
+ super &&
+ limit_by == other.limit_by &&
+ settings == other.settings
+ end
+ end
+ end
+ end
+end
diff --git a/lib/core_extensions/arel/select_manager.rb b/lib/core_extensions/arel/select_manager.rb
new file mode 100644
index 00000000..34974108
--- /dev/null
+++ b/lib/core_extensions/arel/select_manager.rb
@@ -0,0 +1,39 @@
+module CoreExtensions
+ module Arel
+ module SelectManager
+
+ def final!
+ @ctx.final = true
+ self
+ end
+
+ # @param [Hash] values
+ def settings(values)
+ @ast.settings = ::Arel::Nodes::Settings.new(values)
+ self
+ end
+
+ # @param [Array] windows
+ def windows(windows)
+ @ctx.windows = windows.map do |name, opts|
+ # https://github.com/rails/rails/blob/main/activerecord/test/cases/arel/select_manager_test.rb#L790
+ window = ::Arel::Nodes::NamedWindow.new(name)
+ opts.each do |key, value|
+ window.send(key, value)
+ end
+ window
+ end
+ end
+
+ def using(*exprs)
+ @ctx.source.right.last.right = ::Arel::Nodes::Using.new(::Arel.sql(exprs.join(',')))
+ self
+ end
+
+ def limit_by(*exprs)
+ @ast.limit_by = ::Arel::Nodes::LimitBy.new(*exprs)
+ self
+ end
+ end
+ end
+end
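
The same clauses can be composed at the Arel level; the sketch assumes the active connection uses the ClickHouse visitor, and the resulting SQL is shown approximately with quoting omitted.

visits  = Arel::Table.new(:visits)
manager = Arel::SelectManager.new(visits)

manager.project(visits[:visitor_id])
manager.final!                    # wraps the FROM source in Arel::Nodes::Final
manager.limit_by(1, :visitor_id)  # attaches an Arel::Nodes::LimitBy to the statement
manager.settings(max_threads: 2)  # attaches an Arel::Nodes::Settings to the statement

manager.to_sql
# SELECT visits.visitor_id FROM visits FINAL LIMIT 1 BY visitor_id SETTINGS max_threads = 2
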
diff --git a/lib/clickhouse-activerecord/arel/table.rb b/lib/core_extensions/arel/table.rb
similarity index 58%
rename from lib/clickhouse-activerecord/arel/table.rb
rename to lib/core_extensions/arel/table.rb
index 5a9e6828..a49f2461 100644
--- a/lib/clickhouse-activerecord/arel/table.rb
+++ b/lib/core_extensions/arel/table.rb
@@ -1,6 +1,6 @@
-module ClickhouseActiverecord
+module CoreExtensions
module Arel
- class Table < ::Arel::Table
+ module Table
def is_view
type_caster.is_view
end
diff --git a/lib/tasks/clickhouse.rake b/lib/tasks/clickhouse.rake
index 649ff348..1676b22d 100644
--- a/lib/tasks/clickhouse.rake
+++ b/lib/tasks/clickhouse.rake
@@ -1,86 +1,90 @@
# frozen_string_literal: true
namespace :clickhouse do
-
task prepare_schema_migration_table: :environment do
- ClickhouseActiverecord::SchemaMigration.create_table unless ENV['simple'] || ARGV.map{|a| a.include?('--simple') ? true : nil}.compact.any?
+ connection = ActiveRecord::Tasks::DatabaseTasks.migration_connection
+ connection.schema_migration.create_table unless ENV['simple'] || ARGV.any? { |a| a.include?('--simple') }
end
task prepare_internal_metadata_table: :environment do
- ClickhouseActiverecord::InternalMetadata.create_table unless ENV['simple'] || ARGV.map{|a| a.include?('--simple') ? true : nil}.compact.any?
- end
-
- task load_config: :environment do
- ENV['SCHEMA'] = "db/clickhouse_schema.rb"
- ActiveRecord::Migrator.migrations_paths = ["db/migrate_clickhouse"]
- ActiveRecord::Base.establish_connection(:"#{Rails.env}_clickhouse")
+ connection = ActiveRecord::Tasks::DatabaseTasks.migration_connection
+ connection.internal_metadata.create_table unless ENV['simple'] || ARGV.any? { |a| a.include?('--simple') }
end
namespace :schema do
-
- # todo not testing
+ # TODO: deprecated
desc 'Load database schema'
- task load: [:load_config, :prepare_internal_metadata_table] do |t, args|
- simple = ENV['simple'] || ARGV.map{|a| a.include?('--simple') ? true : nil}.compact.any? ? '_simple' : nil
- ClickhouseActiverecord::SchemaMigration.drop_table
- load("#{Rails.root}/db/clickhouse_schema#{simple}.rb")
+ task load: %i[prepare_internal_metadata_table] do
+ puts 'Warning: `rake clickhouse:schema:load` is deprecated! Use `rake db:schema:load:clickhouse` instead'
+ simple = ENV['simple'] || ARGV.any? { |a| a.include?('--simple') } ? '_simple' : nil
+ ActiveRecord::Base.establish_connection(:clickhouse)
+ connection = ActiveRecord::Tasks::DatabaseTasks.migration_connection
+ connection.schema_migration.drop_table
+ load(Rails.root.join("db/clickhouse_schema#{simple}.rb"))
end
+ # TODO: deprecated
desc 'Dump database schema'
- task dump: :environment do |t, args|
- simple = ENV['simple'] || args[:simple] || ARGV.map{|a| a.include?('--simple') ? true : nil}.compact.any? ? '_simple' : nil
- filename = "#{Rails.root}/db/clickhouse_schema#{simple}.rb"
+ task dump: :environment do |_, args|
+ puts 'Warning: `rake clickhouse:schema:dump` is deprecated! Use `rake db:schema:dump:clickhouse` instead'
+ simple = ENV['simple'] || args[:simple] || ARGV.any? { |a| a.include?('--simple') } ? '_simple' : nil
+ filename = Rails.root.join("db/clickhouse_schema#{simple}.rb")
File.open(filename, 'w:utf-8') do |file|
- ActiveRecord::Base.establish_connection(:"#{Rails.env}_clickhouse")
- ClickhouseActiverecord::SchemaDumper.dump(ActiveRecord::Base.connection, file, ActiveRecord::Base, !!simple)
+ ActiveRecord::Base.establish_connection(:clickhouse)
+ ClickhouseActiverecord::SchemaDumper.dump(ActiveRecord::Base.connection, file, ActiveRecord::Base, simple.present?)
end
end
-
end
namespace :structure do
+ config = ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'clickhouse')
+
desc 'Load database structure'
- task load: [:load_config, 'db:check_protected_environments'] do
- ClickhouseActiverecord::Tasks.new(ActiveRecord::Base.configurations["#{Rails.env}_clickhouse"]).structure_load("#{Rails.root}/db/clickhouse_structure.sql")
+ task load: ['db:check_protected_environments'] do
+ ClickhouseActiverecord::Tasks.new(config).structure_load(Rails.root.join('db/clickhouse_structure.sql'))
end
desc 'Dump database structure'
- task dump: [:load_config, 'db:check_protected_environments'] do
- ClickhouseActiverecord::Tasks.new(ActiveRecord::Base.configurations["#{Rails.env}_clickhouse"]).structure_dump("#{Rails.root}/db/clickhouse_structure.sql")
+ task dump: ['db:check_protected_environments'] do
+ ClickhouseActiverecord::Tasks.new(config).structure_dump(Rails.root.join('db/clickhouse_structure.sql'))
end
end
desc 'Creates the database from DATABASE_URL or config/database.yml'
- task create: [:load_config] do
- ActiveRecord::Tasks::DatabaseTasks.create(ActiveRecord::Base.configurations["#{Rails.env}_clickhouse"])
+ task create: [] do
+ puts 'Warning: `rake clickhouse:create` is deprecated! Use `rake db:create:clickhouse` instead'
end
desc 'Drops the database from DATABASE_URL or config/database.yml'
- task drop: [:load_config, 'db:check_protected_environments'] do
- ActiveRecord::Tasks::DatabaseTasks.drop(ActiveRecord::Base.configurations["#{Rails.env}_clickhouse"])
+ task drop: ['db:check_protected_environments'] do
+ puts 'Warning: `rake clickhouse:drop` is deprecated! Use `rake db:drop:clickhouse` instead'
end
desc 'Empty the database from DATABASE_URL or config/database.yml'
- task purge: [:load_config, 'db:check_protected_environments'] do
- ActiveRecord::Tasks::DatabaseTasks.purge(ActiveRecord::Base.configurations["#{Rails.env}_clickhouse"])
+ task purge: ['db:check_protected_environments'] do
+ puts 'Warning: `rake clickhouse:purge` is deprecated! Use `rake db:reset:clickhouse` instead'
end
# desc 'Resets your database using your migrations for the current environment'
- task reset: :load_config do
- Rake::Task['clickhouse:purge'].execute
- Rake::Task['clickhouse:migrate'].execute
+ task :reset do
+ puts 'Warning: `rake clickhouse:reset` is deprecated! Use `rake db:reset:clickhouse` instead'
end
desc 'Migrate the clickhouse database'
- task migrate: [:load_config, :prepare_schema_migration_table, :prepare_internal_metadata_table] do
- Rake::Task['db:migrate'].execute
- if File.exists? "#{Rails.root}/db/clickhouse_schema_simple.rb"
+ task migrate: %i[prepare_schema_migration_table prepare_internal_metadata_table] do
+ puts 'Warning: `rake clickhouse:migrate` is deprecated! Use `rake db:migrate:clickhouse` instead'
+ Rake::Task['db:migrate:clickhouse'].execute
+ if File.exist? "#{Rails.root}/db/clickhouse_schema_simple.rb"
Rake::Task['clickhouse:schema:dump'].execute(simple: true)
end
end
desc 'Rollback the clickhouse database'
- task rollback: [:load_config, :prepare_schema_migration_table, :prepare_internal_metadata_table] do
- Rake::Task['db:rollback'].execute
+ task rollback: %i[prepare_schema_migration_table prepare_internal_metadata_table] do
+ puts 'Warning: `rake clickhouse:rollback` is deprecated! Use `rake db:rollback:clickhouse` instead'
+ Rake::Task['db:rollback:clickhouse'].execute
+ if File.exist? "#{Rails.root}/db/clickhouse_schema_simple.rb"
+ Rake::Task['clickhouse:schema:dump'].execute(simple: true)
+ end
end
end
diff --git a/spec/cases/migration_spec.rb b/spec/cases/migration_spec.rb
deleted file mode 100644
index cbd8b2ad..00000000
--- a/spec/cases/migration_spec.rb
+++ /dev/null
@@ -1,319 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.describe 'Migration', :migrations do
- describe 'performs migrations' do
- let(:model) do
- Class.new(ActiveRecord::Base) do
- self.table_name = 'some'
- end
- end
-
- context 'table creation' do
- context 'plain' do
- it 'creates a table' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'plain_table_creation')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
-
- current_schema = schema(model)
-
- expect(current_schema.keys.count).to eq(2)
- expect(current_schema).to have_key('id')
- expect(current_schema).to have_key('date')
- expect(current_schema['id'].sql_type).to eq('UInt64')
- expect(current_schema['date'].sql_type).to eq('Date')
- end
- end
-
- context 'dsl' do
- context 'empty' do
- it 'creates a table' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_table_creation')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
-
- current_schema = schema(model)
-
- expect(current_schema.keys.count).to eq(1)
- expect(current_schema).to have_key('id')
- expect(current_schema['id'].sql_type).to eq('UInt32')
- end
- end
-
- context 'with engine' do
- it 'creates a table' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_table_with_engine_creation')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
-
- current_schema = schema(model)
-
- expect(current_schema.keys.count).to eq(2)
- expect(current_schema).to have_key('id')
- expect(current_schema).to have_key('date')
- expect(current_schema['id'].sql_type).to eq('UInt32')
- expect(current_schema['date'].sql_type).to eq('Date')
- end
- end
-
- context 'types' do
- context 'decimal' do
- it 'creates a table with valid scale and precision' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_table_with_decimal_creation')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
-
- current_schema = schema(model)
-
- expect(current_schema.keys.count).to eq(3)
- expect(current_schema).to have_key('id')
- expect(current_schema).to have_key('money')
- expect(current_schema).to have_key('balance')
- expect(current_schema['id'].sql_type).to eq('UInt32')
- expect(current_schema['money'].sql_type).to eq('Nullable(Decimal(16, 4))')
- expect(current_schema['balance'].sql_type).to eq('Decimal(32, 2)')
- end
- end
-
- context 'uuid' do
- it 'creates a table with uuid columns' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_table_with_uuid_creation')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
-
- current_schema = schema(model)
-
- expect(current_schema.keys.count).to eq(2)
- expect(current_schema).to have_key('col1')
- expect(current_schema).to have_key('col2')
- expect(current_schema['col1'].sql_type).to eq('UUID')
- expect(current_schema['col2'].sql_type).to eq('Nullable(UUID)')
- end
- end
-
- context 'datetime' do
- it 'creates a table with datetime columns' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_table_with_datetime_creation')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
-
- current_schema = schema(model)
-
- expect(current_schema.keys.count).to eq(2)
- expect(current_schema).to have_key('datetime')
- expect(current_schema).to have_key('datetime64')
- expect(current_schema['datetime'].sql_type).to eq('DateTime')
- expect(current_schema['datetime64'].sql_type).to eq('Nullable(DateTime64(3))')
- end
- end
-
- context 'low_cardinality' do
- it 'creates a table with low cardinality columns' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_table_with_low_cardinality_creation')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
-
- current_schema = schema(model)
-
- expect(current_schema.keys.count).to eq(3)
- expect(current_schema).to have_key('col1')
- expect(current_schema).to have_key('col2')
- expect(current_schema).to have_key('col3')
- expect(current_schema['col1'].sql_type).to eq('LowCardinality(String)')
- expect(current_schema['col2'].sql_type).to eq('LowCardinality(Nullable(String))')
- expect(current_schema['col3'].sql_type).to eq('Array(LowCardinality(Nullable(String)))')
- end
- end
-
- context 'fixed_string' do
- it 'creates a table with fixed string columns' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_table_with_fixed_string_creation')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
-
- current_schema = schema(model)
-
- expect(current_schema.keys.count).to eq(2)
- expect(current_schema).to have_key('fixed_string1')
- expect(current_schema).to have_key('fixed_string16_array')
- expect(current_schema['fixed_string1'].sql_type).to eq('FixedString(1)')
- expect(current_schema['fixed_string16_array'].sql_type).to eq('Array(Nullable(FixedString(16)))')
- end
- end
-
- context 'enum' do
- it 'creates a table with enum columns' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_table_with_enum_creation')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
-
- current_schema = schema(model)
-
- expect(current_schema.keys.count).to eq(3)
- expect(current_schema).to have_key('enum8')
- expect(current_schema).to have_key('enum16')
- expect(current_schema).to have_key('enum_nullable')
- expect(current_schema['enum8'].sql_type).to eq("Enum8('key1' = 1, 'key2' = 2)")
- expect(current_schema['enum16'].sql_type).to eq("Enum16('key1' = 1, 'key2' = 2)")
- expect(current_schema['enum_nullable'].sql_type).to eq("Nullable(Enum8('key1' = 1, 'key2' = 2))")
- end
- end
- end
-
- context 'with distributed' do
- let(:model_distributed) do
- Class.new(ActiveRecord::Base) do
- self.table_name = 'some_distributed'
- end
- end
- if ActiveRecord::version >= Gem::Version.new('6')
- connection_config = ActiveRecord::Base.connection_db_config.configuration_hash
- else
- connection_config = ActiveRecord::Base.connection_config
- end
-
- before(:all) do
- ActiveRecord::Base.establish_connection(connection_config.merge(cluster_name: CLUSTER_NAME))
- end
-
- after(:all) do
- ActiveRecord::Base.establish_connection(connection_config)
- end
-
- it 'creates a table with distributed table' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_create_table_with_distributed')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, ClickhouseActiverecord::SchemaMigration).up }
-
- current_schema = schema(model)
- current_schema_distributed = schema(model_distributed)
-
- expect(current_schema.keys.count).to eq(1)
- expect(current_schema_distributed.keys.count).to eq(1)
-
- expect(current_schema).to have_key('date')
- expect(current_schema_distributed).to have_key('date')
-
- expect(current_schema['date'].sql_type).to eq('Date')
- expect(current_schema_distributed['date'].sql_type).to eq('Date')
- end
-
- it 'drops a table with distributed table' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_create_table_with_distributed')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, ClickhouseActiverecord::SchemaMigration).up }
-
- expect(ActiveRecord::Base.connection.tables).to include('some')
- expect(ActiveRecord::Base.connection.tables).to include('some_distributed')
-
- quietly do
- ClickhouseActiverecord::MigrationContext.new(migrations_dir, ClickhouseActiverecord::SchemaMigration).down
- end
-
- expect(ActiveRecord::Base.connection.tables).not_to include('some')
- expect(ActiveRecord::Base.connection.tables).not_to include('some_distributed')
- end
- end
-
- context 'view' do
- it 'creates a view' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_create_view_with_to_section')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, ClickhouseActiverecord::SchemaMigration).up }
-
- expect(ActiveRecord::Base.connection.tables).to include('some_view')
- end
-
- it 'drops a view' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_create_view_without_to_section')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, ClickhouseActiverecord::SchemaMigration).up }
-
- expect(ActiveRecord::Base.connection.tables).to include('some_view')
-
- quietly do
- ClickhouseActiverecord::MigrationContext.new(migrations_dir, ClickhouseActiverecord::SchemaMigration).down
- end
-
- expect(ActiveRecord::Base.connection.tables).not_to include('some_view')
- end
- end
- end
-
- context 'with alias in cluster_name' do
- let(:model) do
- Class.new(ActiveRecord::Base) do
- self.table_name = 'some'
- end
- end
- if ActiveRecord::version >= Gem::Version.new('6')
- connection_config = ActiveRecord::Base.connection_db_config.configuration_hash
- else
- connection_config = ActiveRecord::Base.connection_config
- end
-
- before(:all) do
- ActiveRecord::Base.establish_connection(connection_config.merge(cluster_name: '{cluster}'))
- end
-
- after(:all) do
- ActiveRecord::Base.establish_connection(connection_config)
- end
-
- it 'creates a table' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_create_table_with_cluster_name_alias')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, ClickhouseActiverecord::SchemaMigration).up }
-
- current_schema = schema(model)
-
- expect(current_schema.keys.count).to eq(1)
- expect(current_schema).to have_key('date')
- expect(current_schema['date'].sql_type).to eq('Date')
- end
-
- it 'drops a table' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_create_table_with_cluster_name_alias')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, ClickhouseActiverecord::SchemaMigration).up }
-
- expect(ActiveRecord::Base.connection.tables).to include('some')
-
- quietly do
- ClickhouseActiverecord::MigrationContext.new(migrations_dir, ClickhouseActiverecord::SchemaMigration).down
- end
-
- expect(ActiveRecord::Base.connection.tables).not_to include('some')
- end
- end
- end
-
- describe 'drop table' do
- it 'drops table' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_drop_table')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up(1) }
-
- expect(ActiveRecord::Base.connection.tables).to include('some')
-
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up(2) }
-
- expect(ActiveRecord::Base.connection.tables).not_to include('some')
- end
- end
-
- describe 'add column' do
- it 'adds a new column' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_add_column')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
-
- current_schema = schema(model)
-
- expect(current_schema.keys.count).to eq(3)
- expect(current_schema).to have_key('id')
- expect(current_schema).to have_key('date')
- expect(current_schema).to have_key('new_column')
- expect(current_schema['id'].sql_type).to eq('UInt32')
- expect(current_schema['date'].sql_type).to eq('Date')
- expect(current_schema['new_column'].sql_type).to eq('Nullable(UInt64)')
- end
- end
-
- describe 'drop column' do
- it 'drops column' do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_drop_column')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
-
- current_schema = schema(model)
-
- expect(current_schema.keys.count).to eq(1)
- expect(current_schema).to have_key('date')
- expect(current_schema['date'].sql_type).to eq('Date')
- end
- end
- end
-end
diff --git a/spec/cases/model_spec.rb b/spec/cases/model_spec.rb
deleted file mode 100644
index 79e8dc81..00000000
--- a/spec/cases/model_spec.rb
+++ /dev/null
@@ -1,106 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.describe 'Model', :migrations do
-
- let(:date) { Date.today }
-
- context 'sample' do
- let!(:model) do
- Class.new(ActiveRecord::Base) do
- self.table_name = 'events'
- end
- end
-
- before do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'add_sample_data')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
- end
-
-
- describe '#create' do
- it 'creates a new record' do
- expect {
- model.create!(
- event_name: 'some event',
- date: date
- )
- }.to change { model.count }
- end
- end
-
- describe '#update' do
- let(:record) { model.create!(event_name: 'some event', date: date) }
-
- it 'raises an error' do
- expect {
- record.update!(event_name: 'new event name')
- }.to raise_error(ActiveRecord::ActiveRecordError, 'Clickhouse update is not supported')
- end
- end
-
- describe '#destroy' do
- let(:record) { model.create!(event_name: 'some event', date: date) }
-
- it 'raises an error' do
- expect {
- record.destroy!
- }.to raise_error(ActiveRecord::ActiveRecordError, 'Clickhouse delete is not supported')
- end
- end
-
- describe '#reverse_order!' do
- it 'blank' do
- expect(model.all.reverse_order!.map(&:event_name)).to eq([])
- end
-
- it 'select' do
- model.create!(event_name: 'some event 1', date: 1.day.ago)
- model.create!(event_name: 'some event 2', date: 2.day.ago)
- expect(model.all.reverse_order!.map(&:event_name)).to eq(['some event 1', 'some event 2'])
- end
- end
- end
-
- context 'array' do
-
- let!(:model) do
- Class.new(ActiveRecord::Base) do
- self.table_name = 'actions'
- end
- end
-
- before do
- migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'add_array_datetime')
- quietly { ClickhouseActiverecord::MigrationContext.new(migrations_dir, model.connection.schema_migration).up }
- end
-
- describe '#create' do
- it 'creates a new record' do
- expect {
- model.create!(
- array_datetime: [1.day.ago, Time.now, '2022-12-06 15:22:49'],
- array_string: %w[asdf jkl],
- date: date
- )
- }.to change { model.count }
- event = model.first
- expect(event.array_datetime.is_a?(Array)).to be_truthy
- expect(event.array_datetime[0].is_a?(DateTime)).to be_truthy
- expect(event.array_string[0].is_a?(String)).to be_truthy
- expect(event.array_string).to eq(%w[asdf jkl])
- end
-
- it 'get record' do
- model.connection.insert("INSERT INTO #{model.table_name} (id, array_datetime, date) VALUES (1, '[''2022-12-06 15:22:49'',''2022-12-05 15:22:49'']', '2022-12-06')")
- expect(model.count).to eq(1)
- event = model.first
- expect(event.date.is_a?(Date)).to be_truthy
- expect(event.date).to eq(Date.parse('2022-12-06'))
- expect(event.array_datetime.is_a?(Array)).to be_truthy
- expect(event.array_datetime[0].is_a?(DateTime)).to be_truthy
- expect(event.array_datetime[0]).to eq('2022-12-06 15:22:49')
- expect(event.array_datetime[1]).to eq('2022-12-05 15:22:49')
- end
- end
- end
-end
diff --git a/spec/cluster/migration_spec.rb b/spec/cluster/migration_spec.rb
new file mode 100644
index 00000000..09edf193
--- /dev/null
+++ b/spec/cluster/migration_spec.rb
@@ -0,0 +1,145 @@
+# frozen_string_literal: true
+
+RSpec.describe 'Cluster Migration', :migrations do
+ describe 'performs migrations' do
+ let(:model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'some'
+ end
+ end
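+ # Each context overrides :directory; raising here makes a missing override fail loudly.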
+ let(:directory) { raise 'NotImplemented' }
+ let(:migrations_dir) { File.join(FIXTURES_PATH, 'migrations', directory) }
+ let(:migration_context) { ActiveRecord::MigrationContext.new(migrations_dir) }
+
+ connection_config = ActiveRecord::Base.connection_db_config.configuration_hash
+
+ before(:all) do
+ raise 'Unknown cluster name in config' if connection_config[:cluster_name].blank?
+ end
+
+ subject do
+ quietly { migration_context.up }
+ end
+
+ context 'dsl' do
+ context 'with distributed' do
+ let(:model_distributed) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'some_distributed'
+ end
+ end
+
+ let(:directory) { 'dsl_create_table_with_distributed' }
+ it 'creates a table with distributed table' do
+ subject
+
+ current_schema = schema(model)
+ current_schema_distributed = schema(model_distributed)
+
+ expect(current_schema.keys.count).to eq(1)
+ expect(current_schema_distributed.keys.count).to eq(1)
+
+ expect(current_schema).to have_key('date')
+ expect(current_schema_distributed).to have_key('date')
+
+ expect(current_schema['date'].sql_type).to eq('Date')
+ expect(current_schema_distributed['date'].sql_type).to eq('Date')
+ end
+
+ it 'drops a table with distributed table' do
+ subject
+
+ expect(ActiveRecord::Base.connection.tables).to include('some')
+ expect(ActiveRecord::Base.connection.tables).to include('some_distributed')
+
+ quietly do
+ migration_context.down
+ end
+
+ expect(ActiveRecord::Base.connection.tables).not_to include('some')
+ expect(ActiveRecord::Base.connection.tables).not_to include('some_distributed')
+ end
+ end
+
+ context "function" do
+ after do
+ ActiveRecord::Base.connection.drop_functions
+ end
+
+ context 'dsl' do
+ let(:directory) { 'dsl_create_function' }
+
+ it 'creates a function' do
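+ # forced_fun is pre-created with a different body; the fixture's create_function with force: true should replace it with (x, y) -> (x + y).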
+ ActiveRecord::Base.connection.do_execute('CREATE FUNCTION forced_fun AS (x, k, b) -> k*x + b', format: nil)
+
+ subject
+
+ expect(ActiveRecord::Base.connection.functions).to match_array(['some_fun', 'forced_fun'])
+ expect(ActiveRecord::Base.connection.show_create_function('forced_fun').chomp).to eq('CREATE FUNCTION forced_fun AS (x, y) -> (x + y)')
+ end
+ end
+ end
+ end
+
+ context 'with alias in cluster_name' do
+ let(:model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'some'
+ end
+ end
+ connection_config = ActiveRecord::Base.connection_db_config.configuration_hash
+
+ before(:all) do
+ ActiveRecord::Base.establish_connection(connection_config.merge(cluster_name: '{cluster}'))
+ end
+
+ after(:all) do
+ ActiveRecord::Base.establish_connection(connection_config)
+ end
+
+ let(:directory) { 'dsl_create_table_with_cluster_name_alias' }
+ it 'creates a table' do
+ subject
+
+ current_schema = schema(model)
+
+ expect(current_schema.keys.count).to eq(1)
+ expect(current_schema).to have_key('date')
+ expect(current_schema['date'].sql_type).to eq('Date')
+ end
+
+ it 'drops a table' do
+ subject
+
+ expect(ActiveRecord::Base.connection.tables).to include('some')
+
+ # Needed to sync schema_migrations between the ClickHouse servers
+ ActiveRecord::Base.connection.execute('SELECT * FROM schema_migrations')
+
+ quietly do
+ migration_context.down
+ end
+
+ expect(ActiveRecord::Base.connection.tables).not_to include('some')
+ end
+ end
+
+ context 'create table with index' do
+ let(:directory) { 'dsl_create_table_with_index' }
+
+ it 'creates a table' do
+
+ expect_any_instance_of(ActiveRecord::ConnectionAdapters::ClickhouseAdapter).to receive(:execute)
+ .with('ALTER TABLE some ON CLUSTER ' + connection_config[:cluster_name] + ' DROP INDEX idx')
+ .and_call_original
+ expect_any_instance_of(ActiveRecord::ConnectionAdapters::ClickhouseAdapter).to receive(:execute)
+ .with('ALTER TABLE some ON CLUSTER ' + connection_config[:cluster_name] + ' ADD INDEX idx2 (int1 * int2) TYPE set(10) GRANULARITY 4')
+ .and_call_original
+
+ subject
+
+ expect(ActiveRecord::Base.connection.show_create_table('some')).to include('INDEX idx2 int1 * int2 TYPE set(10) GRANULARITY 4')
+ end
+ end
+ end
+end
diff --git a/spec/fixtures/migrations/add_array_datetime/1_create_actions_table.rb b/spec/fixtures/migrations/add_array_datetime/1_create_actions_table.rb
index fa1d985b..30d7245e 100644
--- a/spec/fixtures/migrations/add_array_datetime/1_create_actions_table.rb
+++ b/spec/fixtures/migrations/add_array_datetime/1_create_actions_table.rb
@@ -1,12 +1,12 @@
# frozen_string_literal: true
-class CreateActionsTable < ActiveRecord::Migration[5.0]
+class CreateActionsTable < ActiveRecord::Migration[7.1]
def up
create_table :actions, options: 'MergeTree ORDER BY date', force: true do |t|
t.datetime :array_datetime, null: false, array: true
t.string :array_string, null: false, array: true
+ t.integer :array_int, null: false, array: true
t.date :date, null: false
end
end
end
-
diff --git a/spec/fixtures/migrations/add_map_datetime/1_create_verbs_table.rb b/spec/fixtures/migrations/add_map_datetime/1_create_verbs_table.rb
new file mode 100644
index 00000000..e09f0969
--- /dev/null
+++ b/spec/fixtures/migrations/add_map_datetime/1_create_verbs_table.rb
@@ -0,0 +1,15 @@
+class CreateVerbsTable < ActiveRecord::Migration[7.1]
+ def up
+ create_table :verbs, options: 'MergeTree ORDER BY date', force: true do |t|
+ t.datetime :map_datetime, null: false, map: true
+ t.string :map_string, null: false, map: true
+ t.integer :map_int, null: false, map: true
+
+ t.datetime :map_array_datetime, null: false, map: :array
+ t.string :map_array_string, null: false, map: :array
+ t.integer :map_array_int, null: false, map: :array
+
+ t.date :date, null: false
+ end
+ end
+end
diff --git a/spec/fixtures/migrations/add_sample_data/1_create_events_table.rb b/spec/fixtures/migrations/add_sample_data/1_create_events_table.rb
deleted file mode 100644
index 5da04c4c..00000000
--- a/spec/fixtures/migrations/add_sample_data/1_create_events_table.rb
+++ /dev/null
@@ -1,11 +0,0 @@
-# frozen_string_literal: true
-
-class CreateEventsTable < ActiveRecord::Migration[5.0]
- def up
- create_table :events, options: 'MergeTree(date, (date, event_name), 8192)' do |t|
- t.string :event_name, null: false
- t.date :date, null: false
- end
- end
-end
-
diff --git a/spec/fixtures/migrations/add_sample_data/1_create_sample_table.rb b/spec/fixtures/migrations/add_sample_data/1_create_sample_table.rb
new file mode 100644
index 00000000..7fc41814
--- /dev/null
+++ b/spec/fixtures/migrations/add_sample_data/1_create_sample_table.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class CreateSampleTable < ActiveRecord::Migration[7.1]
+ def up
+ create_table :sample, id: false, options: 'ReplacingMergeTree PARTITION BY toYYYYMM(date) ORDER BY (event_name)' do |t|
+ t.string :event_name, null: false
+ t.integer :event_value
+ t.boolean :enabled, null: false, default: false
+ t.date :date, null: false
+ t.datetime :datetime, null: false
+ t.datetime :datetime64, precision: 3
+ t.string :byte_array
+ t.uuid :relation_uuid
+ t.decimal :decimal_value, precision: 38, scale: 16
+ end
+ end
+end
diff --git a/spec/fixtures/migrations/add_sample_data/2_create_join_table.rb b/spec/fixtures/migrations/add_sample_data/2_create_join_table.rb
new file mode 100644
index 00000000..17a4bc2b
--- /dev/null
+++ b/spec/fixtures/migrations/add_sample_data/2_create_join_table.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+class CreateJoinTable < ActiveRecord::Migration[7.1]
+ def up
+ create_table :joins, id: false, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (event_name)' do |t|
+ t.string :event_name, null: false
+ t.integer :event_value
+ t.integer :join_value
+ t.date :date, null: false
+ end
+ end
+end
+
diff --git a/spec/fixtures/migrations/add_sample_data_without_primary_key/1_create_sample_table.rb b/spec/fixtures/migrations/add_sample_data_without_primary_key/1_create_sample_table.rb
new file mode 100644
index 00000000..46c5e846
--- /dev/null
+++ b/spec/fixtures/migrations/add_sample_data_without_primary_key/1_create_sample_table.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class CreateSampleTable < ActiveRecord::Migration[7.1]
+ def up
+ create_table :sample_without_key, id: false, options: 'Log' do |t|
+ t.string :event_name, null: false
+ t.integer :event_value
+ t.boolean :enabled, null: false, default: false
+ t.date :date, null: false
+ t.datetime :datetime, null: false
+ t.datetime :datetime64, precision: 3
+ t.string :byte_array
+ t.uuid :relation_uuid
+ t.decimal :decimal_value, precision: 38, scale: 16
+ end
+ end
+end
diff --git a/spec/fixtures/migrations/dsl_add_column/1_create_some_table.rb b/spec/fixtures/migrations/dsl_add_column/1_create_some_table.rb
index 2435e2c6..c12567de 100644
--- a/spec/fixtures/migrations/dsl_add_column/1_create_some_table.rb
+++ b/spec/fixtures/migrations/dsl_add_column/1_create_some_table.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
-class CreateSomeTable < ActiveRecord::Migration[5.0]
+class CreateSomeTable < ActiveRecord::Migration[7.1]
def up
- create_table :some, options: 'MergeTree(date, (date), 8192)' do |t|
+ create_table :some, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (date)' do |t|
t.date :date, null: false
end
end
diff --git a/spec/fixtures/migrations/dsl_add_column/2_modify_some_table.rb b/spec/fixtures/migrations/dsl_add_column/2_modify_some_table.rb
index e6d5b479..afb1dcaf 100644
--- a/spec/fixtures/migrations/dsl_add_column/2_modify_some_table.rb
+++ b/spec/fixtures/migrations/dsl_add_column/2_modify_some_table.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ModifySomeTable < ActiveRecord::Migration[5.0]
+class ModifySomeTable < ActiveRecord::Migration[7.1]
def up
add_column :some, :new_column, :big_integer
end
diff --git a/spec/fixtures/migrations/dsl_create_function/1_create_some_function.rb b/spec/fixtures/migrations/dsl_create_function/1_create_some_function.rb
new file mode 100644
index 00000000..940a1ee6
--- /dev/null
+++ b/spec/fixtures/migrations/dsl_create_function/1_create_some_function.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+class CreateSomeFunction < ActiveRecord::Migration[7.1]
+ def up
+ create_function :some_fun, "(x,y) -> x + y"
+ create_function :forced_fun, "(x,y) -> x + y", force: true
+ end
+end
diff --git a/spec/fixtures/migrations/dsl_create_table_with_cluster_name_alias/1_create_some_table.rb b/spec/fixtures/migrations/dsl_create_table_with_cluster_name_alias/1_create_some_table.rb
index f07590d2..2c94f452 100644
--- a/spec/fixtures/migrations/dsl_create_table_with_cluster_name_alias/1_create_some_table.rb
+++ b/spec/fixtures/migrations/dsl_create_table_with_cluster_name_alias/1_create_some_table.rb
@@ -1,6 +1,6 @@
-class CreateSomeTable < ActiveRecord::Migration[5.0]
+class CreateSomeTable < ActiveRecord::Migration[7.1]
def change
- create_table :some, options: 'MergeTree(date, (date), 8192)' do |t|
+ create_table :some, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (date)', sync: true, id: false do |t|
t.date :date, null: false
end
end
diff --git a/spec/fixtures/migrations/dsl_create_table_with_distributed/1_create_some_table.rb b/spec/fixtures/migrations/dsl_create_table_with_distributed/1_create_some_table.rb
index 20f10dd1..231db516 100644
--- a/spec/fixtures/migrations/dsl_create_table_with_distributed/1_create_some_table.rb
+++ b/spec/fixtures/migrations/dsl_create_table_with_distributed/1_create_some_table.rb
@@ -1,6 +1,6 @@
-class CreateSomeTable < ActiveRecord::Migration[5.0]
+class CreateSomeTable < ActiveRecord::Migration[7.1]
def change
- create_table :some_distributed, with_distributed: :some, id: false, options: 'MergeTree(date, (date), 8192)' do |t|
+ create_table :some_distributed, with_distributed: :some, id: false, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (date)' do |t|
t.date :date, null: false
end
end
diff --git a/spec/fixtures/migrations/dsl_create_table_with_index/1_create_some_table.rb b/spec/fixtures/migrations/dsl_create_table_with_index/1_create_some_table.rb
new file mode 100644
index 00000000..384f2de9
--- /dev/null
+++ b/spec/fixtures/migrations/dsl_create_table_with_index/1_create_some_table.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+class CreateSomeTable < ActiveRecord::Migration[7.1]
+ def up
+ create_table :some, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (date)' do |t|
+ t.integer :int1, null: false
+ t.integer :int2, null: false
+ t.date :date, null: false
+
+ t.index '(int1 * int2, date)', name: 'idx', type: 'minmax', granularity: 3
+ end
+ end
+end
+
diff --git a/spec/fixtures/migrations/dsl_create_table_with_index/2_drop_index.rb b/spec/fixtures/migrations/dsl_create_table_with_index/2_drop_index.rb
new file mode 100644
index 00000000..066eae78
--- /dev/null
+++ b/spec/fixtures/migrations/dsl_create_table_with_index/2_drop_index.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+class DropIndex < ActiveRecord::Migration[7.1]
+ def up
+ remove_index :some, 'idx'
+ end
+end
+
diff --git a/spec/fixtures/migrations/dsl_create_table_with_index/3_create_index.rb b/spec/fixtures/migrations/dsl_create_table_with_index/3_create_index.rb
new file mode 100644
index 00000000..0b0c5898
--- /dev/null
+++ b/spec/fixtures/migrations/dsl_create_table_with_index/3_create_index.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+class CreateIndex < ActiveRecord::Migration[7.1]
+ def up
+ add_index :some, 'int1 * int2', name: 'idx2', type: 'set(10)', granularity: 4
+ end
+end
+
diff --git a/spec/fixtures/migrations/dsl_create_view_with_to_section/1_create_some_table_1.rb b/spec/fixtures/migrations/dsl_create_view_with_to_section/1_create_some_table_1.rb
index d687db12..54ad2f0e 100644
--- a/spec/fixtures/migrations/dsl_create_view_with_to_section/1_create_some_table_1.rb
+++ b/spec/fixtures/migrations/dsl_create_view_with_to_section/1_create_some_table_1.rb
@@ -1,6 +1,6 @@
-class CreateSomeTable1 < ActiveRecord::Migration[5.0]
+class CreateSomeTable1 < ActiveRecord::Migration[7.1]
def change
- create_table :some_table_1, options: 'MergeTree() ORDER BY col' do |t|
+ create_table :some_table_1, options: 'MergeTree ORDER BY col' do |t|
t.string :col, null: false
end
end
diff --git a/spec/fixtures/migrations/dsl_create_view_with_to_section/2_create_some_table_2.rb b/spec/fixtures/migrations/dsl_create_view_with_to_section/2_create_some_table_2.rb
index 005b137c..452158ed 100644
--- a/spec/fixtures/migrations/dsl_create_view_with_to_section/2_create_some_table_2.rb
+++ b/spec/fixtures/migrations/dsl_create_view_with_to_section/2_create_some_table_2.rb
@@ -1,6 +1,6 @@
-class CreateSomeTable2 < ActiveRecord::Migration[5.0]
+class CreateSomeTable2 < ActiveRecord::Migration[7.1]
def change
- create_table :some_table_2, options: 'MergeTree() ORDER BY col' do |t|
+ create_table :some_table_2, options: 'MergeTree ORDER BY col' do |t|
t.string :col, null: false
end
end
diff --git a/spec/fixtures/migrations/dsl_create_view_with_to_section/3_create_some_view.rb b/spec/fixtures/migrations/dsl_create_view_with_to_section/3_create_some_view.rb
index 13ff5af2..ba5b8548 100644
--- a/spec/fixtures/migrations/dsl_create_view_with_to_section/3_create_some_view.rb
+++ b/spec/fixtures/migrations/dsl_create_view_with_to_section/3_create_some_view.rb
@@ -1,4 +1,4 @@
-class CreateSomeView < ActiveRecord::Migration[5.0]
+class CreateSomeView < ActiveRecord::Migration[7.1]
def change
create_view :some_view, materialized: true, as: 'select * from some_table_1', to: 'some_table_2'
end
diff --git a/spec/fixtures/migrations/dsl_create_view_without_id/1_create_some_table.rb b/spec/fixtures/migrations/dsl_create_view_without_id/1_create_some_table.rb
new file mode 100644
index 00000000..a9f9460f
--- /dev/null
+++ b/spec/fixtures/migrations/dsl_create_view_without_id/1_create_some_table.rb
@@ -0,0 +1,7 @@
+class CreateSomeTable < ActiveRecord::Migration[7.1]
+ def change
+ create_table :some, id: false, options: 'MergeTree ORDER BY col' do |t|
+ t.string :col, null: false
+ end
+ end
+end
diff --git a/spec/fixtures/migrations/dsl_create_view_without_to_section/1_create_some_table.rb b/spec/fixtures/migrations/dsl_create_view_without_to_section/1_create_some_table.rb
index 304245bd..05e8612f 100644
--- a/spec/fixtures/migrations/dsl_create_view_without_to_section/1_create_some_table.rb
+++ b/spec/fixtures/migrations/dsl_create_view_without_to_section/1_create_some_table.rb
@@ -1,6 +1,6 @@
-class CreateSomeTable < ActiveRecord::Migration[5.0]
+class CreateSomeTable < ActiveRecord::Migration[7.1]
def change
- create_table :some_table, options: 'MergeTree() ORDER BY col' do |t|
+ create_table :some_table, options: 'MergeTree ORDER BY col' do |t|
t.string :col, null: false
end
end
diff --git a/spec/fixtures/migrations/dsl_create_view_without_to_section/2_create_some_view.rb b/spec/fixtures/migrations/dsl_create_view_without_to_section/2_create_some_view.rb
index ac867e62..ac319364 100644
--- a/spec/fixtures/migrations/dsl_create_view_without_to_section/2_create_some_view.rb
+++ b/spec/fixtures/migrations/dsl_create_view_without_to_section/2_create_some_view.rb
@@ -1,4 +1,4 @@
-class CreateSomeView < ActiveRecord::Migration[5.0]
+class CreateSomeView < ActiveRecord::Migration[7.1]
def change
create_view :some_view, materialized: true, as: 'select * from some_table'
end
diff --git a/spec/fixtures/migrations/dsl_drop_column/1_create_some_table.rb b/spec/fixtures/migrations/dsl_drop_column/1_create_some_table.rb
index 2435e2c6..c12567de 100644
--- a/spec/fixtures/migrations/dsl_drop_column/1_create_some_table.rb
+++ b/spec/fixtures/migrations/dsl_drop_column/1_create_some_table.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
-class CreateSomeTable < ActiveRecord::Migration[5.0]
+class CreateSomeTable < ActiveRecord::Migration[7.1]
def up
- create_table :some, options: 'MergeTree(date, (date), 8192)' do |t|
+ create_table :some, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (date)' do |t|
t.date :date, null: false
end
end
diff --git a/spec/fixtures/migrations/dsl_drop_column/2_modify_some_table.rb b/spec/fixtures/migrations/dsl_drop_column/2_modify_some_table.rb
index f68d6eda..b0618c85 100644
--- a/spec/fixtures/migrations/dsl_drop_column/2_modify_some_table.rb
+++ b/spec/fixtures/migrations/dsl_drop_column/2_modify_some_table.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class ModifySomeTable < ActiveRecord::Migration[5.0]
+class ModifySomeTable < ActiveRecord::Migration[7.1]
def up
remove_column :some, :id
end
diff --git a/spec/fixtures/migrations/dsl_drop_table/1_create_some_table.rb b/spec/fixtures/migrations/dsl_drop_table/1_create_some_table.rb
index 2435e2c6..c12567de 100644
--- a/spec/fixtures/migrations/dsl_drop_table/1_create_some_table.rb
+++ b/spec/fixtures/migrations/dsl_drop_table/1_create_some_table.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
-class CreateSomeTable < ActiveRecord::Migration[5.0]
+class CreateSomeTable < ActiveRecord::Migration[7.1]
def up
- create_table :some, options: 'MergeTree(date, (date), 8192)' do |t|
+ create_table :some, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (date)' do |t|
t.date :date, null: false
end
end
diff --git a/spec/fixtures/migrations/dsl_drop_table/2_drop_some_table.rb b/spec/fixtures/migrations/dsl_drop_table/2_drop_some_table.rb
index 4c9172b2..2d792781 100644
--- a/spec/fixtures/migrations/dsl_drop_table/2_drop_some_table.rb
+++ b/spec/fixtures/migrations/dsl_drop_table/2_drop_some_table.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class DropSomeTable < ActiveRecord::Migration[5.0]
+class DropSomeTable < ActiveRecord::Migration[7.1]
def up
drop_table :some
end
diff --git a/spec/fixtures/migrations/dsl_drop_table_sync/1_create_some_table.rb b/spec/fixtures/migrations/dsl_drop_table_sync/1_create_some_table.rb
new file mode 100644
index 00000000..c12567de
--- /dev/null
+++ b/spec/fixtures/migrations/dsl_drop_table_sync/1_create_some_table.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+class CreateSomeTable < ActiveRecord::Migration[7.1]
+ def up
+ create_table :some, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (date)' do |t|
+ t.date :date, null: false
+ end
+ end
+end
+
diff --git a/spec/fixtures/migrations/dsl_drop_table_sync/2_drop_some_table.rb b/spec/fixtures/migrations/dsl_drop_table_sync/2_drop_some_table.rb
new file mode 100644
index 00000000..e18e54b6
--- /dev/null
+++ b/spec/fixtures/migrations/dsl_drop_table_sync/2_drop_some_table.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+class DropSomeTable < ActiveRecord::Migration[7.1]
+ def up
+ drop_table :some, sync: true
+ end
+end
+
diff --git a/spec/fixtures/migrations/dsl_table_buffer_creation/1_create_some_table.rb b/spec/fixtures/migrations/dsl_table_buffer_creation/1_create_some_table.rb
new file mode 100644
index 00000000..f61c92ea
--- /dev/null
+++ b/spec/fixtures/migrations/dsl_table_buffer_creation/1_create_some_table.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+class CreateSomeTable < ActiveRecord::Migration[7.1]
+ def up
+ create_table :some do
+
+ end
+ create_table :some_buffers, as: :some, options: "Buffer(#{connection.database}, some, 1, 10, 60, 100, 10000, 10000000, 100000000)"
+ end
+end
+
diff --git a/spec/fixtures/migrations/dsl_table_creation/1_create_some_table.rb b/spec/fixtures/migrations/dsl_table_creation/1_create_some_table.rb
index 401300ea..37b7461f 100644
--- a/spec/fixtures/migrations/dsl_table_creation/1_create_some_table.rb
+++ b/spec/fixtures/migrations/dsl_table_creation/1_create_some_table.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class CreateSomeTable < ActiveRecord::Migration[5.0]
+class CreateSomeTable < ActiveRecord::Migration[7.1]
def up
create_table :some do
diff --git a/spec/fixtures/migrations/dsl_table_with_codec/1_create_some_table.rb b/spec/fixtures/migrations/dsl_table_with_codec/1_create_some_table.rb
new file mode 100644
index 00000000..8f331c71
--- /dev/null
+++ b/spec/fixtures/migrations/dsl_table_with_codec/1_create_some_table.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+class CreateSomeTable < ActiveRecord::Migration[7.1]
+ def up
+ create_table :some, id: false, force: true do |t|
+ t.column :custom, "Nullable(UInt64) CODEC(T64, LZ4)"
+ end
+ end
+end
+
diff --git a/spec/fixtures/migrations/dsl_table_with_datetime_creation/1_create_some_table.rb b/spec/fixtures/migrations/dsl_table_with_datetime_creation/1_create_some_table.rb
index ab5d5b41..b3d644e6 100644
--- a/spec/fixtures/migrations/dsl_table_with_datetime_creation/1_create_some_table.rb
+++ b/spec/fixtures/migrations/dsl_table_with_datetime_creation/1_create_some_table.rb
@@ -1,10 +1,10 @@
# frozen_string_literal: true
-class CreateSomeTable < ActiveRecord::Migration[5.0]
+class CreateSomeTable < ActiveRecord::Migration[7.1]
def up
- create_table :some, id: false do |t|
- t.datetime :datetime, null: false
- t.datetime :datetime64, precision: 3, null: true
+ create_table :some, id: false, force: true do |t|
+ t.datetime :datetime, null: false, default: -> { 'now()' }
+ t.datetime :datetime64, precision: 3, null: true, default: -> { 'now64()' }
end
end
end
diff --git a/spec/fixtures/migrations/dsl_table_with_decimal_creation/1_create_some_table.rb b/spec/fixtures/migrations/dsl_table_with_decimal_creation/1_create_some_table.rb
index 3aedb806..0790eb6d 100644
--- a/spec/fixtures/migrations/dsl_table_with_decimal_creation/1_create_some_table.rb
+++ b/spec/fixtures/migrations/dsl_table_with_decimal_creation/1_create_some_table.rb
@@ -1,10 +1,11 @@
# frozen_string_literal: true
-class CreateSomeTable < ActiveRecord::Migration[5.0]
+class CreateSomeTable < ActiveRecord::Migration[7.1]
def up
create_table :some do |t|
t.decimal :money, precision: 16, scale: 4
t.decimal :balance, precision: 32, scale: 2, null: false, default: 0
+ t.decimal :paid, precision: 32, scale: 2, null: false, default: 1.15
end
end
end
diff --git a/spec/fixtures/migrations/dsl_table_with_engine_creation/1_create_some_table.rb b/spec/fixtures/migrations/dsl_table_with_engine_creation/1_create_some_table.rb
index 2435e2c6..c12567de 100644
--- a/spec/fixtures/migrations/dsl_table_with_engine_creation/1_create_some_table.rb
+++ b/spec/fixtures/migrations/dsl_table_with_engine_creation/1_create_some_table.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
-class CreateSomeTable < ActiveRecord::Migration[5.0]
+class CreateSomeTable < ActiveRecord::Migration[7.1]
def up
- create_table :some, options: 'MergeTree(date, (date), 8192)' do |t|
+ create_table :some, options: 'MergeTree PARTITION BY toYYYYMM(date) ORDER BY (date)' do |t|
t.date :date, null: false
end
end
diff --git a/spec/fixtures/migrations/dsl_table_with_enum_creation/1_create_some_table.rb b/spec/fixtures/migrations/dsl_table_with_enum_creation/1_create_some_table.rb
index 15e6eb2e..a39e2e2d 100644
--- a/spec/fixtures/migrations/dsl_table_with_enum_creation/1_create_some_table.rb
+++ b/spec/fixtures/migrations/dsl_table_with_enum_creation/1_create_some_table.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
-class CreateSomeTable < ActiveRecord::Migration[5.0]
+class CreateSomeTable < ActiveRecord::Migration[7.1]
def up
create_table :some, id: false do |t|
- t.enum :enum8, value: { key1: 1, key2: 2 }, limit: 1, null: false
+ t.enum :enum8, value: { key1: 1, key2: 2 }, limit: 1, null: false, default: :key1
t.enum :enum16, value: { key1: 1, key2: 2 }, limit: 2, null: false
t.enum :enum_nullable, value: { key1: 1, key2: 2 }, null: true
end
diff --git a/spec/fixtures/migrations/dsl_table_with_fixed_string_creation/1_create_some_table.rb b/spec/fixtures/migrations/dsl_table_with_fixed_string_creation/1_create_some_table.rb
index b209fef3..99becb77 100644
--- a/spec/fixtures/migrations/dsl_table_with_fixed_string_creation/1_create_some_table.rb
+++ b/spec/fixtures/migrations/dsl_table_with_fixed_string_creation/1_create_some_table.rb
@@ -1,10 +1,12 @@
# frozen_string_literal: true
-class CreateSomeTable < ActiveRecord::Migration[5.0]
+class CreateSomeTable < ActiveRecord::Migration[7.1]
def up
create_table :some, id: false do |t|
t.string :fixed_string1, fixed_string: 1, null: false
t.string :fixed_string16_array, fixed_string: 16, array: true, null: true
+ t.string :fixed_string16_map, fixed_string: 16, map: true, null: true
+ t.string :fixed_string16_map_array, fixed_string: 16, map: :array, null: true
end
end
end
diff --git a/spec/fixtures/migrations/dsl_table_with_low_cardinality_creation/1_create_some_table.rb b/spec/fixtures/migrations/dsl_table_with_low_cardinality_creation/1_create_some_table.rb
index dac079fb..1a9bfff5 100644
--- a/spec/fixtures/migrations/dsl_table_with_low_cardinality_creation/1_create_some_table.rb
+++ b/spec/fixtures/migrations/dsl_table_with_low_cardinality_creation/1_create_some_table.rb
@@ -1,11 +1,13 @@
# frozen_string_literal: true
-class CreateSomeTable < ActiveRecord::Migration[5.0]
+class CreateSomeTable < ActiveRecord::Migration[7.1]
def up
create_table :some, id: false do |t|
- t.string :col1, low_cardinality: true, null: false
+ t.string :col1, low_cardinality: true, null: false, default: 'col'
t.string :col2, low_cardinality: true, null: true
t.string :col3, low_cardinality: true, array: true, null: true
+ t.string :col4, low_cardinality: true, map: true, null: true
+ t.string :col5, low_cardinality: true, map: :array, null: true
end
end
end
diff --git a/spec/fixtures/migrations/dsl_table_with_uuid_creation/1_create_some_table.rb b/spec/fixtures/migrations/dsl_table_with_uuid_creation/1_create_some_table.rb
index 9b9fad9a..6e2c388d 100644
--- a/spec/fixtures/migrations/dsl_table_with_uuid_creation/1_create_some_table.rb
+++ b/spec/fixtures/migrations/dsl_table_with_uuid_creation/1_create_some_table.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class CreateSomeTable < ActiveRecord::Migration[5.0]
+class CreateSomeTable < ActiveRecord::Migration[7.1]
def up
create_table :some, id: false do |t|
t.uuid :col1, null: false
diff --git a/spec/fixtures/migrations/plain_function_creation/1_create_some_function.rb b/spec/fixtures/migrations/plain_function_creation/1_create_some_function.rb
new file mode 100644
index 00000000..ede6a3e2
--- /dev/null
+++ b/spec/fixtures/migrations/plain_function_creation/1_create_some_function.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+class CreateSomeFunction < ActiveRecord::Migration[7.1]
+ def up
+ sql = <<~SQL
+ CREATE FUNCTION multFun AS (x,y) -> x * y
+ SQL
+ do_execute(sql, format: nil)
+
+ sql = <<~SQL
+ CREATE FUNCTION addFun AS (x,y) -> x + y
+ SQL
+ do_execute(sql, format: nil)
+ end
+end
diff --git a/spec/fixtures/migrations/plain_table_creation/1_create_some_table.rb b/spec/fixtures/migrations/plain_table_creation/1_create_some_table.rb
index e1ade1e5..dc6e6461 100644
--- a/spec/fixtures/migrations/plain_table_creation/1_create_some_table.rb
+++ b/spec/fixtures/migrations/plain_table_creation/1_create_some_table.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-class CreateSomeTable < ActiveRecord::Migration[5.0]
+class CreateSomeTable < ActiveRecord::Migration[7.1]
def up
execute <<~SQL
CREATE TABLE some (
diff --git a/spec/single/migration_spec.rb b/spec/single/migration_spec.rb
new file mode 100644
index 00000000..9b1c574f
--- /dev/null
+++ b/spec/single/migration_spec.rb
@@ -0,0 +1,401 @@
+# frozen_string_literal: true
+
+RSpec.describe 'Migration', :migrations do
+ describe 'performs migrations' do
+ let(:model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'some'
+ end
+ end
+ let(:directory) { raise 'NotImplemented' }
+ let(:migrations_dir) { File.join(FIXTURES_PATH, 'migrations', directory) }
+ let(:migration_context) { ActiveRecord::MigrationContext.new(migrations_dir, model.connection.schema_migration, model.connection.internal_metadata) }
+
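+ # Captured up front so contexts that switch databases (e.g. 'no database') can restore the original connection.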
+ connection_config = ActiveRecord::Base.connection_db_config.configuration_hash
+
+ subject do
+ quietly { migration_context.up }
+ end
+
+ context 'database creation' do
+ let(:db) { (0...8).map { (65 + rand(26)).chr }.join.downcase }
+
+ it 'create' do
+ model.connection.create_database(db)
+ end
+
+ after { model.connection.drop_database(db) }
+ end
+
+ context 'table creation' do
+ context 'plain' do
+ let(:directory) { 'plain_table_creation' }
+
+ it 'creates a table' do
+ subject
+
+ current_schema = schema(model)
+
+ expect(current_schema.keys.count).to eq(2)
+ expect(current_schema).to have_key('id')
+ expect(current_schema).to have_key('date')
+ expect(current_schema['id'].sql_type).to eq('UInt64')
+ expect(current_schema['date'].sql_type).to eq('Date')
+ end
+ end
+
+ context 'dsl' do
+ context 'empty' do
+ let(:directory) { 'dsl_table_creation' }
+ it 'creates a table' do
+ subject
+
+ current_schema = schema(model)
+
+ expect(current_schema.keys.count).to eq(1)
+ expect(current_schema).to have_key('id')
+ expect(current_schema['id'].sql_type).to eq('UInt32')
+ end
+ end
+
+ context 'without id' do
+ let(:directory) { 'dsl_create_view_without_id' }
+ it 'creates a table' do
+ subject
+
+ current_schema = schema(model)
+
+ expect(current_schema.keys.count).to eq(1)
+ expect(current_schema).to_not have_key('id')
+ expect(current_schema['col'].sql_type).to eq('String')
+ end
+ end
+
+ context 'with buffer table' do
+ let(:directory) { 'dsl_table_buffer_creation' }
+ it 'creates a table' do
+ subject
+
+ expect(ActiveRecord::Base.connection.tables).to include('some_buffers')
+ end
+ end
+
+ context 'with engine' do
+ let(:directory) { 'dsl_table_with_engine_creation' }
+ it 'creates a table' do
+ subject
+
+ current_schema = schema(model)
+
+ expect(current_schema.keys.count).to eq(2)
+ expect(current_schema).to have_key('id')
+ expect(current_schema).to have_key('date')
+ expect(current_schema['id'].sql_type).to eq('UInt32')
+ expect(current_schema['date'].sql_type).to eq('Date')
+ end
+ end
+
+ context 'types' do
+ context 'decimal' do
+ let(:directory) { 'dsl_table_with_decimal_creation' }
+ it 'creates a table with valid scale and precision' do
+ subject
+
+ current_schema = schema(model)
+
+ expect(current_schema.keys.count).to eq(4)
+ expect(current_schema).to have_key('id')
+ expect(current_schema).to have_key('money')
+ expect(current_schema).to have_key('balance')
+ expect(current_schema['id'].sql_type).to eq('UInt32')
+ expect(current_schema['money'].sql_type).to eq('Nullable(Decimal(16, 4))')
+ expect(current_schema['balance'].sql_type).to eq('Decimal(32, 2)')
+ expect(current_schema['balance'].default).to eq(0.0)
+ expect(current_schema['paid'].default).to eq(1.15)
+ end
+ end
+
+ context 'uuid' do
+ let(:directory) { 'dsl_table_with_uuid_creation' }
+ it 'creates a table with uuid columns' do
+ subject
+
+ current_schema = schema(model)
+
+ expect(current_schema.keys.count).to eq(2)
+ expect(current_schema).to have_key('col1')
+ expect(current_schema).to have_key('col2')
+ expect(current_schema['col1'].sql_type).to eq('UUID')
+ expect(current_schema['col2'].sql_type).to eq('Nullable(UUID)')
+ end
+ end
+
+ context 'codec' do
+ let(:directory) { 'dsl_table_with_codec' }
+ it 'creates a table with custom column' do
+ subject
+
+ current_schema = schema(model)
+
+ expect(current_schema.keys.count).to eq(1)
+ expect(current_schema).to have_key('custom')
+ expect(current_schema['custom'].sql_type).to eq('Nullable(UInt64)')
+ expect(current_schema['custom'].codec).to eq('T64, LZ4')
+ end
+ end
+
+ context 'datetime' do
+ let(:directory) { 'dsl_table_with_datetime_creation' }
+ it 'creates a table with datetime columns' do
+ subject
+
+ current_schema = schema(model)
+
+ expect(current_schema.keys.count).to eq(2)
+ expect(current_schema).to have_key('datetime')
+ expect(current_schema).to have_key('datetime64')
+ expect(current_schema['datetime'].sql_type).to eq('DateTime')
+ expect(current_schema['datetime'].default).to be_nil
+ expect(current_schema['datetime'].default_function).to eq('now()')
+ expect(current_schema['datetime64'].sql_type).to eq('Nullable(DateTime64(3))')
+ expect(current_schema['datetime64'].default).to be_nil
+ expect(current_schema['datetime64'].default_function).to eq('now64()')
+ end
+ end
+
+ context 'low_cardinality' do
+ let(:directory) { 'dsl_table_with_low_cardinality_creation' }
+ it 'creates a table with low cardinality columns' do
+ subject
+
+ current_schema = schema(model)
+
+ expect(current_schema.keys.count).to eq(5)
+ expect(current_schema).to have_key('col1')
+ expect(current_schema).to have_key('col2')
+ expect(current_schema).to have_key('col3')
+ expect(current_schema).to have_key('col4')
+ expect(current_schema).to have_key('col5')
+ expect(current_schema['col1'].sql_type).to eq('LowCardinality(String)')
+ expect(current_schema['col1'].default).to eq('col')
+ expect(current_schema['col2'].sql_type).to eq('LowCardinality(Nullable(String))')
+ expect(current_schema['col3'].sql_type).to eq('Array(LowCardinality(Nullable(String)))')
+ expect(current_schema['col4'].sql_type).to eq('Map(String, LowCardinality(Nullable(String)))')
+ expect(current_schema['col5'].sql_type).to eq('Map(String, Array(LowCardinality(Nullable(String))))')
+ end
+ end
+
+ context 'fixed_string' do
+ let(:directory) { 'dsl_table_with_fixed_string_creation' }
+ it 'creates a table with fixed string columns' do
+ subject
+
+ current_schema = schema(model)
+
+ expect(current_schema.keys.count).to eq(4)
+ expect(current_schema).to have_key('fixed_string1')
+ expect(current_schema).to have_key('fixed_string16_array')
+ expect(current_schema).to have_key('fixed_string16_map')
+ expect(current_schema).to have_key('fixed_string16_map_array')
+ expect(current_schema['fixed_string1'].sql_type).to eq('FixedString(1)')
+ expect(current_schema['fixed_string16_array'].sql_type).to eq('Array(Nullable(FixedString(16)))')
+ expect(current_schema['fixed_string16_map'].sql_type).to eq('Map(String, Nullable(FixedString(16)))')
+ expect(current_schema['fixed_string16_map_array'].sql_type).to eq('Map(String, Array(Nullable(FixedString(16))))')
+
+ end
+ end
+
+ context 'enum' do
+ let(:directory) { 'dsl_table_with_enum_creation' }
+ it 'creates a table with enum columns' do
+ subject
+
+ current_schema = schema(model)
+
+ expect(current_schema.keys.count).to eq(3)
+ expect(current_schema).to have_key('enum8')
+ expect(current_schema).to have_key('enum16')
+ expect(current_schema).to have_key('enum_nullable')
+ expect(current_schema['enum8'].sql_type).to eq("Enum8('key1' = 1, 'key2' = 2)")
+ expect(current_schema['enum8'].default).to eq('key1')
+ expect(current_schema['enum16'].sql_type).to eq("Enum16('key1' = 1, 'key2' = 2)")
+ expect(current_schema['enum_nullable'].sql_type).to eq("Nullable(Enum8('key1' = 1, 'key2' = 2))")
+ end
+ end
+ end
+
+ context 'no database' do
+ before(:all) do
+ ActiveRecord::Base.establish_connection(connection_config.merge(database: 'test_not_exist'))
+ end
+
+ after(:all) do
+ ActiveRecord::Base.establish_connection(connection_config)
+ end
+
+ let(:directory) { 'plain_table_creation' }
+ it 'raise error' do
+ expect { subject }.to raise_error(ActiveRecord::NoDatabaseError)
+ end
+ end
+
+ context 'creates a view' do
+ let(:directory) { 'dsl_create_view_with_to_section' }
+ it 'creates a view' do
+ subject
+
+ expect(ActiveRecord::Base.connection.tables).to include('some_view')
+ end
+ end
+
+ context 'drops a view' do
+ let(:directory) { 'dsl_create_view_without_to_section' }
+ it 'drops a view' do
+ subject
+
+ expect(ActiveRecord::Base.connection.tables).to include('some_view')
+
+ quietly do
+ migration_context.down
+ end
+
+ expect(ActiveRecord::Base.connection.tables).not_to include('some_view')
+ end
+ end
+
+ context 'with index' do
+ let(:directory) { 'dsl_create_table_with_index' }
+
+ it 'creates a table' do
+ quietly { migration_context.up(1) }
+
+ expect(ActiveRecord::Base.connection.show_create_table('some')).to include('INDEX idx (int1 * int2, date) TYPE minmax GRANULARITY 3')
+
+ quietly { migration_context.up(2) }
+
+ expect(ActiveRecord::Base.connection.show_create_table('some')).to_not include('INDEX idx')
+
+ quietly { migration_context.up(3) }
+
+ expect(ActiveRecord::Base.connection.show_create_table('some')).to include('INDEX idx2 int1 * int2 TYPE set(10) GRANULARITY 4')
+ end
+
+ it 'add index if not exists' do
+ subject
+
+ expect { ActiveRecord::Base.connection.add_index('some', 'int1 + int2', name: 'idx2', type: 'minmax', granularity: 1) }.to raise_error(ActiveRecord::ActiveRecordError, include('already exists'))
+
+ ActiveRecord::Base.connection.add_index('some', 'int1 + int2', name: 'idx2', type: 'minmax', granularity: 1, if_not_exists: true)
+ end
+
+ it 'drop index if exists' do
+ subject
+
+ expect { ActiveRecord::Base.connection.remove_index('some', 'idx3') }.to raise_error(ActiveRecord::ActiveRecordError, include('Cannot find index'))
+
+ ActiveRecord::Base.connection.remove_index('some', 'idx2')
+ end
+
+ it 'rebuild index' do
+ subject
+
+ expect { ActiveRecord::Base.connection.rebuild_index('some', 'idx3') }.to raise_error(ActiveRecord::ActiveRecordError, include('Unknown index'))
+
+ # expect { ActiveRecord::Base.connection.rebuild_index('some', 'idx3', if_exists: true) }.to_not raise_error
+
+ ActiveRecord::Base.connection.rebuild_index('some', 'idx2')
+ end
+
+ it 'clear index' do
+ subject
+
+ ActiveRecord::Base.connection.clear_index('some', 'idx2')
+ end
+ end
+ end
+ end
+
+ describe 'drop table' do
+ let(:directory) { 'dsl_drop_table' }
+ it 'drops table' do
+ quietly { migration_context.up(1) }
+
+ expect(ActiveRecord::Base.connection.tables).to include('some')
+
+ quietly { migration_context.up(2) }
+
+ expect(ActiveRecord::Base.connection.tables).not_to include('some')
+ end
+ end
+
+ describe 'drop table sync' do
+ it 'drops table' do
+ migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'dsl_drop_table_sync')
+ quietly { ActiveRecord::MigrationContext.new(migrations_dir).up(1) }
+
+ expect(ActiveRecord::Base.connection.tables).to include('some')
+
+ quietly { ActiveRecord::MigrationContext.new(migrations_dir).up(2) }
+
+ expect(ActiveRecord::Base.connection.tables).not_to include('some')
+ end
+ end
+
+ describe 'add column' do
+ let(:directory) { 'dsl_add_column' }
+ it 'adds a new column' do
+ subject
+
+ current_schema = schema(model)
+
+ expect(current_schema.keys.count).to eq(3)
+ expect(current_schema).to have_key('id')
+ expect(current_schema).to have_key('date')
+ expect(current_schema).to have_key('new_column')
+ expect(current_schema['id'].sql_type).to eq('UInt32')
+ expect(current_schema['date'].sql_type).to eq('Date')
+ expect(current_schema['new_column'].sql_type).to eq('Nullable(UInt64)')
+ end
+ end
+
+ describe 'drop column' do
+ let(:directory) { 'dsl_drop_column' }
+ it 'drops column' do
+ subject
+
+ current_schema = schema(model)
+
+ expect(current_schema.keys.count).to eq(1)
+ expect(current_schema).to have_key('date')
+ expect(current_schema['date'].sql_type).to eq('Date')
+ end
+ end
+
+ context 'function creation' do
+ after do
+ ActiveRecord::Base.connection.drop_functions
+ end
+
+ context 'plain' do
+ let(:directory) { 'plain_function_creation' }
+ it 'creates a function' do
+ subject
+
+ expect(ActiveRecord::Base.connection.functions).to match_array(['addFun', 'multFun'])
+ end
+ end
+
+ context 'dsl' do
+ let(:directory) { 'dsl_create_function' }
+ it 'creates a function' do
+ ActiveRecord::Base.connection.do_execute('CREATE FUNCTION forced_fun AS (x, k, b) -> k*x + b', format: nil)
+
+ subject
+
+ expect(ActiveRecord::Base.connection.functions).to match_array(['forced_fun', 'some_fun'])
+ expect(ActiveRecord::Base.connection.show_create_function('forced_fun').chomp).to eq('CREATE FUNCTION forced_fun AS (x, y) -> (x + y)')
+ end
+ end
+ end
+ end
+end
diff --git a/spec/single/model_spec.rb b/spec/single/model_spec.rb
new file mode 100644
index 00000000..d32b3320
--- /dev/null
+++ b/spec/single/model_spec.rb
@@ -0,0 +1,508 @@
+# frozen_string_literal: true
+
+RSpec.describe 'Model', :migrations do
+
+ class ModelJoin < ActiveRecord::Base
+ self.table_name = 'joins'
+ belongs_to :model, class_name: 'Model'
+ end
+ class Model < ActiveRecord::Base
+ self.table_name = 'sample'
+ has_many :joins, class_name: 'ModelJoin', primary_key: 'event_name'
+ end
+ class ModelPk < ActiveRecord::Base
+ self.table_name = 'sample'
+ self.primary_key = 'event_name'
+ end
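+ # ClickHouse 23.4+ lets the adapter detect a primary key for this table, so the update/destroy specs below branch on the server version.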
+ IS_NEW_CLICKHOUSE_SERVER = Model.connection.server_version.to_f >= 23.4
+
+ let(:date) { Date.today }
+
+ context 'sample' do
+
+ before do
+ migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'add_sample_data')
+ quietly { ActiveRecord::MigrationContext.new(migrations_dir).up }
+ end
+
+ if IS_NEW_CLICKHOUSE_SERVER
+ it "detect primary key" do
+ expect(Model.primary_key).to eq('event_name')
+ end
+ end
+
+ it 'DB::Exception in row value' do
+ Model.create!(event_name: 'DB::Exception')
+ expect(Model.first.event_name).to eq('DB::Exception')
+ end
+
+ describe '#do_execute' do
+ it 'returns formatted result' do
+ result = Model.connection.do_execute('SELECT 1 AS t')
+ expect(result['data']).to eq([[1]])
+ expect(result['meta']).to eq([{ 'name' => 't', 'type' => 'UInt8' }])
+ end
+
+ context 'with JSONCompact format' do
+ it 'returns formatted result' do
+ result = Model.connection.do_execute('SELECT 1 AS t', format: 'JSONCompact')
+ expect(result['data']).to eq([[1]])
+ expect(result['meta']).to eq([{ 'name' => 't', 'type' => 'UInt8' }])
+ end
+ end
+
+ context 'with JSONCompactEachRowWithNamesAndTypes format' do
+ it 'returns formatted result' do
+ result = Model.connection.do_execute('SELECT 1 AS t', format: 'JSONCompactEachRowWithNamesAndTypes')
+ expect(result['data']).to eq([[1]])
+ expect(result['meta']).to eq([{ 'name' => 't', 'type' => 'UInt8' }])
+ end
+ end
+ end
+
+ describe '#create' do
+ it 'creates a new record' do
+ expect {
+ Model.create!(
+ event_name: 'some event',
+ date: date
+ )
+ }.to change { Model.count }
+ end
+
+ it 'insert all' do
+ if ActiveRecord::version >= Gem::Version.new('6')
+ Model.insert_all([
+ {event_name: 'some event 1', date: date},
+ {event_name: 'some event 2', date: date},
+ ])
+ expect(Model.count).to eq(2)
+ end
+ end
+ end
+
+ describe '#update' do
+ let!(:record) { Model.create!(event_name: 'some event', event_value: 1, date: date) }
+
+ it 'update' do
+ expect {
+ Model.where(event_name: 'some event').update_all(event_value: 2)
+ }.to_not raise_error
+ end
+
+ it 'update model with primary key' do
+ expect {
+ if IS_NEW_CLICKHOUSE_SERVER
+ Model.first.update!(event_value: 2)
+ else
+ ModelPk.first.update!(event_value: 2)
+ end
+ }.to_not raise_error
+ end
+ end
+
+ describe '#delete' do
+ let!(:record) { Model.create!(event_name: 'some event', date: date) }
+
+ it 'scope' do
+ expect {
+ Model.where(event_name: 'some event').delete_all
+ }.to_not raise_error
+ end
+
+ it 'destroy model with primary key' do
+ expect {
+ if IS_NEW_CLICKHOUSE_SERVER
+ Model.first.destroy!
+ else
+ ModelPk.first.destroy!
+ end
+ }.to_not raise_error
+ end
+ end
+
+ describe '#find_by' do
+ let!(:record) { Model.create!(event_name: 'some event', date: Date.current, datetime: Time.now) }
+
+ it 'finds the record' do
+ expect(Model.find_by(event_name: 'some event').attributes).to eq(record.attributes)
+ end
+ end
+
+ describe '#reverse_order!' do
+ it 'blank' do
+ expect(Model.all.reverse_order!.map(&:event_name)).to eq([])
+ end
+
+ it 'select' do
+ Model.create!(event_name: 'some event 1', date: 1.day.ago)
+ Model.create!(event_name: 'some event 2', date: 2.day.ago)
+ if IS_NEW_CLICKHOUSE_SERVER
+ expect(Model.all.reverse_order!.to_sql).to eq('SELECT sample.* FROM sample ORDER BY sample.event_name DESC')
+ expect(Model.all.reverse_order!.map(&:event_name)).to eq(['some event 2', 'some event 1'])
+ else
+ expect(Model.all.reverse_order!.to_sql).to eq('SELECT sample.* FROM sample ORDER BY sample.date DESC')
+ expect(Model.all.reverse_order!.map(&:event_name)).to eq(['some event 1', 'some event 2'])
+ end
+ end
+ end
+
+ describe 'convert type with aggregations' do
+ let!(:record1) { Model.create!(event_name: 'some event', event_value: 1, date: date) }
+ let!(:record2) { Model.create!(event_name: 'some event', event_value: 3, date: date) }
+
+ it 'integer' do
+ expect(Model.select(Arel.sql('sum(event_value) AS event_value'))[0].event_value.class).to eq(Integer)
+ expect(Model.select(Arel.sql('sum(event_value) AS value'))[0].attributes['value'].class).to eq(Integer)
+ expect(Model.pluck(Arel.sql('sum(event_value)')).first[0].class).to eq(Integer)
+ end
+ end
+
+ describe 'boolean column type' do
+ let!(:record1) { Model.create!(event_name: 'some event', event_value: 1, date: date) }
+
+ it 'bool result' do
+ expect(Model.first.enabled.class).to eq(FalseClass)
+ end
+
+ it 'is mapped to :boolean' do
+ type = Model.columns_hash['enabled'].type
+ expect(type).to eq(:boolean)
+ end
+ end
+
+ describe 'string column type as byte array' do
+ let(:bytes) { (0..255).to_a }
+ let!(:record1) { Model.create!(event_name: 'some event', byte_array: bytes.pack('C*')) }
+
+ it 'keeps all bytes' do
+ returned_byte_array = Model.first.byte_array
+
+ expect(returned_byte_array.unpack('C*')).to eq(bytes)
+ end
+ end
+
+ describe 'UUID column type' do
+ let(:random_uuid) { SecureRandom.uuid }
+ let!(:record1) do
+ Model.create!(event_name: 'some event', event_value: 1, date: date, relation_uuid: random_uuid)
+ end
+
+ it 'is mapped to :uuid' do
+ type = Model.columns_hash['relation_uuid'].type
+ expect(type).to eq(:uuid)
+ end
+
+ it 'accepts proper value' do
+ expect(record1.relation_uuid).to eq(random_uuid)
+ end
+
+ it 'accepts non-canonical uuid' do
+ record1.relation_uuid = 'ABCD-0123-4567-89EF-dead-beef-0101-1010'
+ expect(record1.relation_uuid).to eq('abcd0123-4567-89ef-dead-beef01011010')
+ end
+
+ it 'does not accept invalid values' do
+ record1.relation_uuid = 'invalid-uuid'
+ expect(record1.relation_uuid).to be_nil
+ end
+ end
+
+ describe 'decimal column type' do
+ let!(:record1) do
+ Model.create!(event_name: 'some event', decimal_value: BigDecimal('95891.74'))
+ end
+
+ # If converted to float, the value would be 9589174.000000001. This happened previously
+ # due to JSON parsing of numeric values to floats.
+ it 'keeps precision' do
+ decimal_value = Model.first.decimal_value
+ expect(decimal_value).to eq(BigDecimal('95891.74'))
+ end
+ end
+
+ describe '#settings' do
+ it 'works' do
+ sql = Model.settings(optimize_read_in_order: 1, cast_keep_nullable: 1).to_sql
+ expect(sql).to eq('SELECT sample.* FROM sample SETTINGS optimize_read_in_order = 1, cast_keep_nullable = 1')
+ end
+
+ it 'quotes' do
+ sql = Model.settings(foo: :bar).to_sql
+ expect(sql).to eq('SELECT sample.* FROM sample SETTINGS foo = \'bar\'')
+ end
+
+ it 'allows passing the symbol :default to reset a setting' do
+ sql = Model.settings(max_insert_block_size: :default).to_sql
+ expect(sql).to eq('SELECT sample.* FROM sample SETTINGS max_insert_block_size = DEFAULT')
+ end
+ end
+
+ describe '#using' do
+ it 'works' do
+ sql = Model.joins(:joins).using(:event_name, :date).to_sql
+ expect(sql).to eq('SELECT sample.* FROM sample INNER JOIN joins USING event_name,date')
+ end
+
+ it 'works with filters' do
+ sql = Model.joins(:joins).using(:event_name, :date).where(joins: { event_value: 1 }).to_sql
+ expect(sql).to eq("SELECT sample.* FROM sample INNER JOIN joins USING event_name,date WHERE joins.event_value = 1")
+ end
+ end
+
+ describe '#window' do
+ it 'works' do
+ sql = Model.window('x', order: 'date', partition: 'name', rows: 'UNBOUNDED PRECEDING').select('sum(event_value) OVER x').to_sql
+ expect(sql).to eq('SELECT sum(event_value) OVER x FROM sample WINDOW x AS (PARTITION BY name ORDER BY date ROWS UNBOUNDED PRECEDING)')
+ end
+
+ it 'empty' do
+ sql = Model.window('x').select('sum(event_value) OVER x').to_sql
+ expect(sql).to eq('SELECT sum(event_value) OVER x FROM sample WINDOW x AS ()')
+ end
+ end
+
+ describe 'arel predicates' do
+ describe '#matches' do
+ it 'uses ilike for case insensitive matches' do
+ sql = Model.where(Model.arel_table[:event_name].matches('some event')).to_sql
+ expect(sql).to eq("SELECT sample.* FROM sample WHERE sample.event_name ILIKE 'some event'")
+ end
+
+ it 'uses like for case sensitive matches' do
+ sql = Model.where(Model.arel_table[:event_name].matches('some event', nil, true)).to_sql
+ expect(sql).to eq("SELECT sample.* FROM sample WHERE sample.event_name LIKE 'some event'")
+ end
+ end
+ end
+
+ describe 'DateTime64 create' do
+ it 'create a new record' do
+ time = DateTime.parse('2023-07-21 08:00:00.123')
+ Model.create!(datetime: time, datetime64: time)
+ row = Model.first
+ expect(row.datetime).to_not eq(row.datetime64)
+ expect(row.datetime.strftime('%Y-%m-%d %H:%M:%S')).to eq('2023-07-21 08:00:00')
+ expect(row.datetime64.strftime('%Y-%m-%d %H:%M:%S.%3N')).to eq('2023-07-21 08:00:00.123')
+ end
+ end
+
+ describe 'final request' do
+ let!(:record1) { Model.create!(date: date, event_name: '1') }
+ let!(:record2) { Model.create!(date: date, event_name: '1') }
+
+ it 'select' do
+ expect(Model.count).to eq(2)
+ expect(Model.final.count).to eq(1)
+ expect(Model.final!.count).to eq(1)
+ expect(Model.final.where(date: '2023-07-21').to_sql).to eq('SELECT sample.* FROM sample FINAL WHERE sample.date = \'2023-07-21\'')
+ end
+ end
+
+ describe '#limit_by' do
+ it 'works' do
+ sql = Model.limit_by(1, :event_name).to_sql
+ expect(sql).to eq('SELECT sample.* FROM sample LIMIT 1 BY event_name')
+ end
+
+ it 'works with limit' do
+ sql = Model.limit(1).limit_by(1, :event_name).to_sql
+ expect(sql).to eq('SELECT sample.* FROM sample LIMIT 1 BY event_name LIMIT 1')
+ end
+ end
+
+ describe '#group_by_grouping_sets' do
+ it 'raises an error with no arguments' do
+ expect { Model.group_by_grouping_sets }.to raise_error(ArgumentError, 'The method .group_by_grouping_sets() must contain arguments.')
+ end
+
+ it 'works with the empty grouping set' do
+ sql = Model.group_by_grouping_sets([]).to_sql
+ expect(sql).to eq('SELECT sample.* FROM sample GROUP BY GROUPING SETS ( ( ) )')
+ end
+
+ it 'accepts strings' do
+ sql = Model.group_by_grouping_sets(%w[foo bar], %w[baz]).to_sql
+ expect(sql).to eq('SELECT sample.* FROM sample GROUP BY GROUPING SETS ( ( foo, bar ), ( baz ) )')
+ end
+
+ it 'accepts symbols' do
+ sql = Model.group_by_grouping_sets(%i[foo bar], %i[baz]).to_sql
+ expect(sql).to eq('SELECT sample.* FROM sample GROUP BY GROUPING SETS ( ( foo, bar ), ( baz ) )')
+ end
+
+ it 'accepts Arel nodes' do
+ sql = Model.group_by_grouping_sets([Model.arel_table[:foo], Model.arel_table[:bar]], [Model.arel_table[:baz]]).to_sql
+ expect(sql).to eq('SELECT sample.* FROM sample GROUP BY GROUPING SETS ( ( sample.foo, sample.bar ), ( sample.baz ) )')
+ end
+
+ it 'accepts mixed arguments' do
+ sql = Model.group_by_grouping_sets(['foo', :bar], [Model.arel_table[:baz]]).to_sql
+ expect(sql).to eq('SELECT sample.* FROM sample GROUP BY GROUPING SETS ( ( foo, bar ), ( sample.baz ) )')
+ end
+ end
+ end
+
+ context 'sample with id column' do
+ class ModelWithoutPrimaryKey < ActiveRecord::Base
+ self.table_name = 'sample_without_key'
+ end
+
+ before do
+ migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'add_sample_data_without_primary_key')
+ quietly { ActiveRecord::MigrationContext.new(migrations_dir).up }
+ end
+
+ it 'detect primary key' do
+ expect(ModelWithoutPrimaryKey.primary_key).to eq(nil)
+ end
+
+ describe '#delete' do
+ let!(:record) { ModelWithoutPrimaryKey.create!(event_name: 'some event', date: date) }
+
+ it 'model destroy' do
+ expect {
+ record.destroy!
+ }.to raise_error(ActiveRecord::ActiveRecordError, 'Deleting a row is not possible without a primary key')
+ end
+
+ it 'scope' do
+ expect {
+ ModelWithoutPrimaryKey.where(event_name: 'some event').delete_all
+ }.to_not raise_error
+ end
+ end
+ end
+
+ context 'array' do
+ let!(:model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'actions'
+ end
+ end
+
+ before do
+ migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'add_array_datetime')
+ quietly { ActiveRecord::MigrationContext.new(migrations_dir).up }
+ end
+
+ describe '#create' do
+ it 'creates a new record' do
+ expect {
+ model.create!(
+ array_datetime: [1.day.ago, Time.now, '2022-12-06 15:22:49'],
+ array_string: %w[asdf jkl],
+ array_int: [1, 2],
+ date: date
+ )
+ }.to change { model.count }
+ event = model.first
+ expect(event.array_datetime.is_a?(Array)).to be_truthy
+ expect(event.array_datetime[0].is_a?(DateTime)).to be_truthy
+ expect(event.array_string[0].is_a?(String)).to be_truthy
+ expect(event.array_string).to eq(%w[asdf jkl])
+ expect(event.array_int.is_a?(Array)).to be_truthy
+ expect(event.array_int).to eq([1, 2])
+ end
+
+ it 'create with insert all' do
+ expect {
+ model.insert_all([{
+ array_datetime: [1.day.ago, Time.now, '2022-12-06 15:22:49'],
+ array_string: %w[asdf jkl],
+ array_int: [1, 2],
+ date: date
+ }])
+ }.to change { model.count }
+ end
+
+ it 'reads back a record inserted via SQL' do
+ model.connection.insert("INSERT INTO #{model.table_name} (id, array_datetime, date) VALUES (1, '[''2022-12-06 15:22:49'',''2022-12-05 15:22:49'']', '2022-12-06')")
+ expect(model.count).to eq(1)
+ event = model.first
+ expect(event.date.is_a?(Date)).to be_truthy
+ expect(event.date).to eq(Date.parse('2022-12-06'))
+ expect(event.array_datetime.is_a?(Array)).to be_truthy
+ expect(event.array_datetime[0].is_a?(DateTime)).to be_truthy
+ expect(event.array_datetime[0]).to eq(DateTime.parse('2022-12-06 15:22:49'))
+ expect(event.array_datetime[1]).to eq(DateTime.parse('2022-12-05 15:22:49'))
+ end
+ end
+ end
+
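+ # ClickHouse Map(K, V) columns are read back as Ruby Hashes with string keys;
+ # nested Array values keep their element casting.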
+ context 'map' do
+ let!(:model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'verbs'
+ end
+ end
+
+ before do
+ migrations_dir = File.join(FIXTURES_PATH, 'migrations', 'add_map_datetime')
+ quietly { ActiveRecord::MigrationContext.new(migrations_dir).up }
+ end
+
+ describe '#create' do
+ it 'creates a new record' do
+ expect {
+ model.create!(
+ map_datetime: {a: 1.day.ago, b: Time.now, c: '2022-12-06 15:22:49'},
+ map_string: {a: 'asdf', b: 'jkl'},
+ map_int: {a: 1, b: 2},
+ map_array_datetime: {a: [1.day.ago], b: [Time.now, '2022-12-06 15:22:49']},
+ map_array_string: {a: ['str'], b: ['str1', 'str2']},
+ map_array_int: {a: [1], b: [1, 2, 3]},
+ date: date
+ )
+ }.to change { model.count }.by(1)
+
+ record = model.first
+ expect(record.map_datetime).to be_a Hash
+ expect(record.map_string).to be_a Hash
+ expect(record.map_int).to be_a Hash
+ expect(record.map_array_datetime).to be_a Hash
+ expect(record.map_array_string).to be_a Hash
+ expect(record.map_array_int).to be_a Hash
+
+ expect(record.map_datetime['a']).to be_a DateTime
+ expect(record.map_string['a']).to be_a String
+ expect(record.map_string).to eq({'a' => 'asdf', 'b' => 'jkl'})
+ expect(record.map_int).to eq({'a' => 1, 'b' => 2})
+
+ expect(record.map_array_datetime['b']).to be_an Array
+ expect(record.map_array_string['b']).to be_an Array
+ expect(record.map_array_int['b']).to be_an Array
+ end
+
+ it 'creates records with insert_all' do
+ expect {
+ model.insert_all([{
+ map_datetime: {a: 1.day.ago, b: Time.now, c: '2022-12-06 15:22:49'},
+ map_string: {a: 'asdf', b: 'jkl'},
+ map_int: {a: 1, b: 2},
+ map_array_datetime: {a: [1.day.ago], b: [Time.now, '2022-12-06 15:22:49']},
+ map_array_string: {a: ['str'], b: ['str1', 'str2']},
+ map_array_int: {a: [1], b: [1, 2, 3]},
+ date: date
+ }])
+ }.to change { model.count }.by(1)
+ end
+
+ it 'reads back a record inserted via SQL' do
+ model.connection.insert("INSERT INTO #{model.table_name} (id, map_datetime, map_array_datetime, date) VALUES (1, {'a': '2022-12-05 15:22:49', 'b': '2024-01-01 12:00:08'}, {'c': ['2022-12-05 15:22:49','2024-01-01 12:00:08']}, '2022-12-06')")
+ expect(model.count).to eq(1)
+ record = model.first
+ expect(record.date.is_a?(Date)).to be_truthy
+ expect(record.date).to eq(Date.parse('2022-12-06'))
+ expect(record.map_datetime).to be_a Hash
+ expect(record.map_datetime['a'].is_a?(DateTime)).to be_truthy
+ expect(record.map_datetime['a']).to eq(DateTime.parse('2022-12-05 15:22:49'))
+ expect(record.map_datetime['b']).to eq(DateTime.parse('2024-01-01 12:00:08'))
+ expect(record.map_array_datetime).to be_a Hash
+ expect(record.map_array_datetime['c']).to be_an Array
+ expect(record.map_array_datetime['c'][0]).to eq(DateTime.parse('2022-12-05 15:22:49'))
+ expect(record.map_array_datetime['c'][1]).to eq(DateTime.parse('2024-01-01 12:00:08'))
+ end
+ end
+ end
+end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 0e6170b0..4ee2c5bc 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -9,12 +9,12 @@
ClickhouseActiverecord.load
FIXTURES_PATH = File.join(File.dirname(__FILE__), 'fixtures')
-CLUSTER_NAME = 'test'
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = '.rspec_status'
config.include ActiveSupport::Testing::Stream
+ config.raise_errors_for_deprecations!
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
@@ -38,10 +38,11 @@
default: {
adapter: 'clickhouse',
host: 'localhost',
- port: 8123,
- database: 'test',
- username: nil,
- password: nil
+ port: ENV['CLICKHOUSE_PORT'] || 8123,
+ database: ENV['CLICKHOUSE_DATABASE'] || 'test',
+ username: ENV['CLICKHOUSE_USER'],
+ password: ENV['CLICKHOUSE_PASSWORD'],
+ cluster_name: ENV['CLICKHOUSE_CLUSTER'],
}
)
@@ -55,20 +56,9 @@ def schema(model)
end
def clear_db
- if ActiveRecord::version >= Gem::Version.new('6')
- cluster = ActiveRecord::Base.connection_db_config.configuration_hash[:cluster_name]
- else
- cluster = ActiveRecord::Base.connection_config[:cluster_name]
- end
- pattern = if cluster
- normalized_cluster_name = cluster.start_with?('{') ? "'#{cluster}'" : cluster
-
- "DROP TABLE %s ON CLUSTER #{normalized_cluster_name}"
- else
- 'DROP TABLE %s'
- end
-
- ActiveRecord::Base.connection.tables.each { |table| ActiveRecord::Base.connection.execute(pattern % table) }
+ ActiveRecord::Base.connection.tables.each { |table| ActiveRecord::Base.connection.drop_table(table, sync: true) }
+rescue ActiveRecord::NoDatabaseError
+ # Ignored
end
def clear_consts