Fix/elasticsearch plugin ci failure (#297)

* Revert "Fix search test"

  This reverts commit f638163eca.

* Add Elasticsearch CI setting

* Revert development settings for public_statuses_index and statuses_index

parent eaa9ade59b, commit 866f93b90e

5 changed files with 32 additions and 216 deletions
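For reference, the pattern being reverted in the three Chewy index classes below kept two analysis hashes per index and picked one when the class was loaded. A minimal sketch of that shape, assuming the chewy gem is available; the constant names and the discard_punctuation option come from the diffs below, while the class name and the trimmed hash contents are illustrative only:

# Sketch of the reverted settings-selection pattern (not the actual index class).
require 'chewy'

class ExampleStatusesIndex < Chewy::Index
  # Plugin-free analysis chain the reverted code used for the test environment.
  DEVELOPMENT_SETTINGS = {
    filter: {
      english_stop: { type: 'stop', stopwords: '_english_' },
    },
  }.freeze

  # Chain that relies on the analysis-sudachi plugin installed in CI below.
  PRODUCTION_SETTINGS = {
    tokenizer: {
      sudachi_tokenizer: {
        type: 'sudachi_tokenizer',
        discard_punctuation: 'true',
      },
    },
  }.freeze

  # The real classes pass index_preset(refresh_interval: '30s'); a literal hash
  # keeps this sketch standalone. They also switch on Rails.env.test? directly;
  # the defined? guard only keeps the sketch loadable outside a Rails process.
  settings index: { refresh_interval: '30s' },
           analysis: defined?(Rails) && Rails.env.test? ? DEVELOPMENT_SETTINGS : PRODUCTION_SETTINGS
end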
@@ -4,7 +4,4 @@ NODE_ENV=production
LOCAL_DOMAIN=cb6e6126.ngrok.io
LOCAL_HTTPS=true
# Elasticsearch
ES_ENABLED=false
ES_HOST=localhost
ES_PORT=9200
ES_PREFIX=test
.github/workflows/test-ruby.yml (39 changes, vendored)
@@ -259,19 +259,6 @@ jobs:
        ports:
          - 6379:6379

      search:
        image: ${{ matrix.search-image }}
        env:
          discovery.type: single-node
          xpack.security.enabled: false
        options: >-
          --health-cmd "curl http://localhost:9200/_cluster/health"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 10
        ports:
          - 9200:9200

    env:
      DB_HOST: localhost
      DB_USER: postgres
@@ -313,6 +300,32 @@ jobs:
      - name: Set up Javascript environment
        uses: ./.github/actions/setup-javascript

      - name: Configure sysctl limits
        run: |
          sudo swapoff -a
          sudo sysctl -w vm.swappiness=1
          sudo sysctl -w fs.file-max=262144
          sudo sysctl -w vm.max_map_count=262144

      - name: Install Elasticsearch
        run: |
          wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-7.17.10-amd64.deb
          wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-7.17.10-amd64.deb.sha512
          shasum -a 512 -c elasticsearch-7.17.10-amd64.deb.sha512
          sudo dpkg -i elasticsearch-7.17.10-amd64.deb
          sudo /usr/share/elasticsearch/bin/elasticsearch-plugin install https://github.com/WorksApplications/elasticsearch-sudachi/releases/download/v3.1.0/elasticsearch-7.17.10-analysis-sudachi-3.1.0.zip

      - name: Install dictionary
        run: |
          wget http://sudachi.s3-website-ap-northeast-1.amazonaws.com/sudachidict/sudachi-dictionary-latest-core.zip
          unzip sudachi-dictionary-latest-core.zip
          sudo mkdir /etc/elasticsearch/sudachi -p
          sudo cp sudachi-dictionary-*/system_core.dic /etc/elasticsearch/sudachi

      - name: Running Elasticsearch
        run: |
          sudo systemctl start elasticsearch

      - name: Load database schema
        run: './bin/rails db:create db:schema:load db:seed'
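The removed search: service container gated the job on curl http://localhost:9200/_cluster/health via --health-cmd, whereas the replacement steps only run systemctl start elasticsearch. If the suite ever needs an explicit readiness gate for the locally installed instance, a stdlib-only Ruby poller along the lines of the removed health command might look like this (the helper name, retry count and sleep interval are assumptions, not part of this commit):

# Hypothetical readiness check: poll the same /_cluster/health endpoint the
# removed service container used, until the cluster reports yellow or green.
require 'net/http'
require 'json'
require 'uri'

def wait_for_elasticsearch(host: 'localhost', port: 9200, attempts: 30)
  uri = URI("http://#{host}:#{port}/_cluster/health")
  attempts.times do
    begin
      response = Net::HTTP.get_response(uri)
      if response.is_a?(Net::HTTPSuccess)
        status = JSON.parse(response.body)['status']
        return status if %w(yellow green).include?(status)
      end
    rescue Errno::ECONNREFUSED, Errno::ECONNRESET
      # Elasticsearch is still booting; retry after the sleep below.
    end
    sleep 1
  end
  raise "Elasticsearch on #{host}:#{port} did not become ready"
end

puts wait_for_elasticsearch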
@@ -1,72 +1,7 @@
# frozen_string_literal: true

class AccountsIndex < Chewy::Index
  DEVELOPMENT_SETTINGS = {
    filter: {
      english_stop: {
        type: 'stop',
        stopwords: '_english_',
      },

      english_stemmer: {
        type: 'stemmer',
        language: 'english',
      },

      english_possessive_stemmer: {
        type: 'stemmer',
        language: 'possessive_english',
      },
    },

    analyzer: {
      natural: {
        tokenizer: 'standard',
        filter: %w(
          lowercase
          asciifolding
          cjk_width
          elision
          english_possessive_stemmer
          english_stop
          english_stemmer
        ),
      },

      sudachi_analyzer: {
        tokenizer: 'standard',
        filter: %w(
          lowercase
          asciifolding
          cjk_width
          elision
          english_possessive_stemmer
          english_stop
          english_stemmer
        ),
      },

      verbatim: {
        tokenizer: 'standard',
        filter: %w(lowercase asciifolding cjk_width),
      },

      edge_ngram: {
        tokenizer: 'edge_ngram',
        filter: %w(lowercase asciifolding cjk_width),
      },
    },

    tokenizer: {
      edge_ngram: {
        type: 'edge_ngram',
        min_gram: 1,
        max_gram: 15,
      },
    },
  }.freeze

  PRODUCTION_SETTINGS = {
  settings index: index_preset(refresh_interval: '30s'), analysis: {
    filter: {
      english_stop: {
        type: 'stop',

@@ -142,9 +77,7 @@ class AccountsIndex < Chewy::Index
        discard_punctuation: 'true',
      },
    },
  }.freeze

  settings index: index_preset(refresh_interval: '30s'), analysis: Rails.env.test? ? DEVELOPMENT_SETTINGS : PRODUCTION_SETTINGS
  }

  index_scope ::Account.searchable.includes(:account_stat)
@@ -1,69 +1,7 @@
# frozen_string_literal: true

class PublicStatusesIndex < Chewy::Index
  DEVELOPMENT_SETTINGS = {
    filter: {
      english_stop: {
        type: 'stop',
        stopwords: '_english_',
      },

      english_stemmer: {
        type: 'stemmer',
        language: 'english',
      },

      english_possessive_stemmer: {
        type: 'stemmer',
        language: 'possessive_english',
      },
    },

    analyzer: {
      verbatim: {
        tokenizer: 'uax_url_email',
        filter: %w(lowercase),
      },

      content: {
        tokenizer: 'standard',
        filter: %w(
          lowercase
          asciifolding
          cjk_width
          elision
          english_possessive_stemmer
          english_stop
          english_stemmer
        ),
      },

      sudachi_analyzer: {
        tokenizer: 'standard',
        filter: %w(
          lowercase
          asciifolding
          cjk_width
          elision
          english_possessive_stemmer
          english_stop
          english_stemmer
        ),
      },

      hashtag: {
        tokenizer: 'keyword',
        filter: %w(
          word_delimiter_graph
          lowercase
          asciifolding
          cjk_width
        ),
      },
    },
  }.freeze

  PRODUCTION_SETTINGS = {
  settings index: index_preset(refresh_interval: '30s', number_of_shards: 5), analysis: {
    filter: {
      english_stop: {
        type: 'stop',

@@ -137,9 +75,7 @@ class PublicStatusesIndex < Chewy::Index
        discard_punctuation: 'true',
      },
    },
  }.freeze

  settings index: index_preset(refresh_interval: '30s', number_of_shards: 5), analysis: Rails.env.test? ? DEVELOPMENT_SETTINGS : PRODUCTION_SETTINGS
  }

  index_scope ::Status.unscoped
                      .kept
@@ -1,68 +1,7 @@
# frozen_string_literal: true

class StatusesIndex < Chewy::Index
  DEVELOPMENT_SETTINGS = {
    filter: {
      english_stop: {
        type: 'stop',
        stopwords: '_english_',
      },

      english_stemmer: {
        type: 'stemmer',
        language: 'english',
      },

      english_possessive_stemmer: {
        type: 'stemmer',
        language: 'possessive_english',
      },
    },
    analyzer: {
      verbatim: {
        tokenizer: 'uax_url_email',
        filter: %w(lowercase),
      },

      content: {
        tokenizer: 'standard',
        filter: %w(
          lowercase
          asciifolding
          cjk_width
          elision
          english_possessive_stemmer
          english_stop
          english_stemmer
        ),
      },

      sudachi_analyzer: {
        tokenizer: 'standard',
        filter: %w(
          lowercase
          asciifolding
          cjk_width
          elision
          english_possessive_stemmer
          english_stop
          english_stemmer
        ),
      },

      hashtag: {
        tokenizer: 'keyword',
        filter: %w(
          word_delimiter_graph
          lowercase
          asciifolding
          cjk_width
        ),
      },
    },
  }.freeze

  PRODUCTION_SETTINGS = {
  settings index: index_preset(refresh_interval: '30s', number_of_shards: 5), analysis: {
    filter: {
      english_stop: {
        type: 'stop',

@@ -140,9 +79,7 @@ class StatusesIndex < Chewy::Index
        discard_punctuation: 'true',
      },
    },
  }.freeze

  settings index: index_preset(refresh_interval: '30s', number_of_shards: 5), analysis: Rails.env.test? ? DEVELOPMENT_SETTINGS : PRODUCTION_SETTINGS
  }

  index_scope ::Status.unscoped.kept.without_reblogs.includes(
    :account,
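The hashtag analyzer defined in both statuses indexes above chains word_delimiter_graph, lowercase, asciifolding and cjk_width over a keyword tokenizer. Elasticsearch's _analyze API accepts that same combination ad hoc, so the chain can be inspected directly against the CI instance; a small stdlib-only sketch (host, port and the sample text are assumptions, not part of this commit):

# Hypothetical spot check: run the hashtag filter chain from the diffs above
# through POST /_analyze and print the tokens it produces.
require 'net/http'
require 'json'
require 'uri'

uri = URI('http://localhost:9200/_analyze')
request = Net::HTTP::Post.new(uri, 'Content-Type' => 'application/json')
request.body = JSON.generate({
  tokenizer: 'keyword',
  filter: %w(word_delimiter_graph lowercase asciifolding cjk_width),
  text: 'Fediverse_検索',
})

response = Net::HTTP.start(uri.hostname, uri.port) { |http| http.request(request) }
tokens = JSON.parse(response.body).fetch('tokens', []).map { |t| t['token'] }
puts tokens.inspect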