# Install zsh along with the autosuggestions and syntax-highlighting plugins (Debian/Ubuntu apt).
sudo apt install zsh-autosuggestions zsh-syntax-highlighting zsh
| ################################################################## | |
| # /etc/elasticsearch/elasticsearch.yml | |
| # | |
| # Base configuration for a write heavy cluster | |
| # | |
| # Cluster / Node Basics | |
| cluster.name: logng | |
| # Nodes can have arbitrary attributes we can use for routing |
#!/bin/bash
# Validate the first required positional argument for the App Search sync script.
# $1 - App Search host URL; printed usage lists the full expected argument set.
if [ -z "$1" ]; then
  # Quote "$1": unquoted, a value containing spaces makes `[ -z ... ]` a runtime
  # syntax error (ShellCheck SC2086).
  echo "usage: $0 APP_SEARCH_HOST APP_SEARCH_AUTH ES_HOST ENGINE_NAME"
  exit 1
else
  APP_SEARCH_HOST=$1
fi
| if [ -z $2 ]; then |
| import $ivy.`org.scalaz::scalaz-core:7.2.+` | |
| import $ivy.`org.json4s::json4s-jackson:3.+` | |
| import $ivy.`org.json4s::json4s-ext:3.+` | |
| import $ivy.`com.github.nscala-time::nscala-time:2.18.+` | |
| import scalaz._ | |
| import Scalaz._ | |
| import com.github.nscala_time.time.Imports._ | |
| import scala.io.Source | |
| import org.json4s._ | |
| import org.json4s.jackson.JsonMethods._ |
| version: '2' | |
| services: | |
| elasticsearch1: | |
| image: docker.elastic.co/elasticsearch/elasticsearch:6.2.2 | |
| container_name: es1 | |
| environment: | |
| - cluster.name=docker-cluster | |
| - bootstrap.memory_lock=true | |
| - "ES_JAVA_OPTS=-Xms512m -Xmx512m" | |
| ulimits: |
| #!/bin/sh | |
| # Make sure to: | |
| # 1) Name this file `backup.sh` and place it in /home/ubuntu | |
| # 2) Run sudo apt-get install awscli to install the AWSCLI | |
| # 3) Run aws configure (enter s3-authorized IAM user and specify region) | |
| # 4) Fill in DB host + name | |
| # 5) Create S3 bucket for the backups and fill it in below (set a lifecycle rule to expire files older than X days in the bucket) | |
| # 6) Run chmod +x backup.sh | |
| # 7) Test it out via ./backup.sh |
| published | tags | |||
|---|---|---|---|---|
true |
|
這篇文章，基本上是參考這篇 Scala Case Classes In Depth 來寫的，閱讀吸收後以我的例子與描述方式以中文寫下。