Merge remote-tracking branch 'upstream/master' into feature/fix_nested_not_filters

This commit is contained in: 57857277cd

.ci/Dockerfile  (Normal file, 10 lines)
@@ -0,0 +1,10 @@
ARG PYTHON_VERSION=3.7
FROM python:${PYTHON_VERSION}

WORKDIR /code/eland
COPY requirements-dev.txt .
RUN pip install -r requirements-dev.txt

COPY . .
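
For context, the .ci/run-tests script added later in this diff builds and runs this image roughly as follows; this is a trimmed sketch that omits the network and environment flags used on CI:

docker build --file .ci/Dockerfile --tag elastic/eland .
docker run --rm elastic/eland ./run_build.sh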

.ci/certs/ca.crt  (Executable file, 20 lines)
@@ -0,0 +1,20 @@
-----BEGIN CERTIFICATE-----
MIIDSTCCAjGgAwIBAgIUIwN+0zglsexRKwE1RGHvlCcmrdwwDQYJKoZIhvcNAQEL
BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l
cmF0ZWQgQ0EwHhcNMTkwMjEzMDcyMjQwWhcNMjIwMjEyMDcyMjQwWjA0MTIwMAYD
VQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBDQTCC
ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANILs0JO0e7x29zeVx21qalK
XKdX+AMlGJPH75wWO/Jq6YHtxt1wYIg762krOBXfG6JsFSOIwIv5VrzGGRGjSPt9
OXQyXrDDiQvsBT3rpzLNdDs7KMl2tZswwv7w9ujgud0cYnS1MOpn81rfPc73DvMg
xuhplofDx6fn3++PjVRU2FNiIVWyEoaxRjCeGPMBubKZYaYbQA6vYM4Z+ByG727B
AyAER3t7xmvYti/EoO2hv2HQk5zgcj/Oq3AJKhnt8LH8fnfm3TnYNM1htvXqhN05
vsvhvm2PHfnA5qLlSr/3W0aI/U/PqfsFDCgyRV097sMIaKkmavb0Ue7aQ7lgtp0C
AwEAAaNTMFEwHQYDVR0OBBYEFDRKlCMowWR1rwxE0d1lTEQe5O71MB8GA1UdIwQY
MBaAFDRKlCMowWR1rwxE0d1lTEQe5O71MA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZI
hvcNAQELBQADggEBAKbCJ95EBpeuvF70KEt6QU70k/SH1NRvM9YzKryV0D975Jvu
HOSm9HgSTULeAUFZIa4oYyf3QUfVoI+2T/aQrfXA3gfrJWsHURkyNmiHOFAbYHqi
xA6i249G2GTEjc1+le/M2N2CcDKAmurW6vSGK4upXQbPd6KmnhHREX74zkWjnOa+
+tibbSSOCT4Tmja2DbBxAPuivU9IB1g/hIUmbYQqKffQrBJA0658tz6w63a/Q7xN
pCvvbSgiMZ6qcVIcJkBT2IooYie+ax45pQECHthgIUcQAzfmIfqlU0Qfl8rDgAmn
0c1o6HQjKGU2aVGgSRuaaiHaSZjbPIZVS51sOoI=
-----END CERTIFICATE-----

.ci/certs/ca.pem  (Normal file, 20 lines)
@@ -0,0 +1,20 @@
-----BEGIN CERTIFICATE-----
MIIDSTCCAjGgAwIBAgIUIwN+0zglsexRKwE1RGHvlCcmrdwwDQYJKoZIhvcNAQEL
BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l
cmF0ZWQgQ0EwHhcNMTkwMjEzMDcyMjQwWhcNMjIwMjEyMDcyMjQwWjA0MTIwMAYD
VQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBDQTCC
ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANILs0JO0e7x29zeVx21qalK
XKdX+AMlGJPH75wWO/Jq6YHtxt1wYIg762krOBXfG6JsFSOIwIv5VrzGGRGjSPt9
OXQyXrDDiQvsBT3rpzLNdDs7KMl2tZswwv7w9ujgud0cYnS1MOpn81rfPc73DvMg
xuhplofDx6fn3++PjVRU2FNiIVWyEoaxRjCeGPMBubKZYaYbQA6vYM4Z+ByG727B
AyAER3t7xmvYti/EoO2hv2HQk5zgcj/Oq3AJKhnt8LH8fnfm3TnYNM1htvXqhN05
vsvhvm2PHfnA5qLlSr/3W0aI/U/PqfsFDCgyRV097sMIaKkmavb0Ue7aQ7lgtp0C
AwEAAaNTMFEwHQYDVR0OBBYEFDRKlCMowWR1rwxE0d1lTEQe5O71MB8GA1UdIwQY
MBaAFDRKlCMowWR1rwxE0d1lTEQe5O71MA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZI
hvcNAQELBQADggEBAKbCJ95EBpeuvF70KEt6QU70k/SH1NRvM9YzKryV0D975Jvu
HOSm9HgSTULeAUFZIa4oYyf3QUfVoI+2T/aQrfXA3gfrJWsHURkyNmiHOFAbYHqi
xA6i249G2GTEjc1+le/M2N2CcDKAmurW6vSGK4upXQbPd6KmnhHREX74zkWjnOa+
+tibbSSOCT4Tmja2DbBxAPuivU9IB1g/hIUmbYQqKffQrBJA0658tz6w63a/Q7xN
pCvvbSgiMZ6qcVIcJkBT2IooYie+ax45pQECHthgIUcQAzfmIfqlU0Qfl8rDgAmn
0c1o6HQjKGU2aVGgSRuaaiHaSZjbPIZVS51sOoI=
-----END CERTIFICATE-----

.ci/certs/testnode.crt  (Executable file, 19 lines)
@@ -0,0 +1,19 @@
-----BEGIN CERTIFICATE-----
MIIDIjCCAgqgAwIBAgIUI4QU6jA1dYSCbdIA6oAb2TBEluowDQYJKoZIhvcNAQEL
BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l
cmF0ZWQgQ0EwHhcNMTkwMjEzMDcyMzEzWhcNMjIwMjEyMDcyMzEzWjATMREwDwYD
VQQDEwhpbnN0YW5jZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJeT
yOy6EAScZxrULKjHePciiz38grivCrhFFV+dThaRCcl3DhDzb9Eny5q5iEw3WvLQ
Rqmf01jncNIhaocTt66VqveXaMubbE8O0LcG6e4kpFO+JtnVF8JTARTc+ux/1uD6
hO1VG/HItM7WQrQxh4hfB2u1AX2YQtoqEtXXEC+UHWfl4QzuzXjBnKCkO/L9/6Tf
yNFQWXxKnIiTs8Xm9sEhhSCBJPlLTQu+MX4vR2Uwj5XZmflDUr+ZTenl9qYxL6b3
SWhh/qEl4GAj1+tS7ZZOxE0237mUh3IIFYSWSaMm8K2m/BYHkLNWL5B1dMic0lsv
osSoYrQuCef4HQMCitsCAwEAAaNNMEswHQYDVR0OBBYEFFMg4l1GLW8lYbwASY+r
YeWYRzIiMB8GA1UdIwQYMBaAFDRKlCMowWR1rwxE0d1lTEQe5O71MAkGA1UdEwQC
MAAwDQYJKoZIhvcNAQELBQADggEBAEQrgh1xALpumQTzsjxFRGque/vlKTgRs5Kh
xtgapr6wjIbdq7dagee+4yNOKzS5lGVXCgwrJlHESv9qY0uumT/33vK2uduJ7NAd
fR2ZzyBnhMX+mkYhmGrGYCTUMUIwOIQYa4Evis4W+LHmCIDG03l7gLHfdIBe9VMO
pDZum8f6ng0MM49s8/rXODNYKw8kFyUhnfChqMi/2yggb1uUIfKlJJIchkgYjE13
zuC+fjo029Pq1jeMIdxugLf/7I/8NiW1Yj9aCXevUXG1qzHFEuKAinBXYOZO/vWS
LaEqOhwrzNynwgGpYAr7Rfgv4AflltYIIav4PZT03P7fbyAAf8s=
-----END CERTIFICATE-----

.ci/certs/testnode.key  (Executable file, 27 lines)
@@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEpQIBAAKCAQEAl5PI7LoQBJxnGtQsqMd49yKLPfyCuK8KuEUVX51OFpEJyXcO
EPNv0SfLmrmITDda8tBGqZ/TWOdw0iFqhxO3rpWq95doy5tsTw7Qtwbp7iSkU74m
2dUXwlMBFNz67H/W4PqE7VUb8ci0ztZCtDGHiF8Ha7UBfZhC2ioS1dcQL5QdZ+Xh
DO7NeMGcoKQ78v3/pN/I0VBZfEqciJOzxeb2wSGFIIEk+UtNC74xfi9HZTCPldmZ
+UNSv5lN6eX2pjEvpvdJaGH+oSXgYCPX61Ltlk7ETTbfuZSHcggVhJZJoybwrab8
FgeQs1YvkHV0yJzSWy+ixKhitC4J5/gdAwKK2wIDAQABAoIBAQCRFTJna/xy/WUu
59FLR4qAOj8++JgCwACpue4oU7/vl6nffSYokWoAr2+RzG4qTX2vFi3cpA8+dGCn
sLZvTi8tWzKGxBTZdg2oakzaMzLr74SeZ052iCGyrZJGbvF6Ny7srr1XEXSq6+os
ZCb6pMHOhO7saBdiKMAsY8MdjTl/33AduuE6ztqv+L92xTr2g4QlbT1KvWlEgppU
k4Gy7zdETkPBTSH/17ZwyGJoJICIAhbL4IpmOM4dPIg8nFkVPPpy6p0z4uGjtgnK
nreZ2EKMzCafBaHn7A77gpi0OrQdl6pe0fsGqv/323YjCJPbwwl5TsoNq44DzwiX
3M7XiVJxAoGBAOCne56vdN4uZmCgLVGT2JSUNVPOu4bfjrxWH6cslzrPT2Zhp3lO
M4axZ3gmcervV252YEZXntXDHHCSfrECllRN1WFD63XmyQ/CkhuvZkkeRHfzL1TE
EdqHOTqs4sRETZ7+RITFC81DZQkWWOKeyXMjyPBqd7RnThQHijB1c8Y5AoGBAKy6
CVKBx+zz5crVD0tz4UhOmz1wRNN0CL0l+FXRuFSgbzMIvwpfiqe25crgeLHe2M2/
TogdWbjZ2nUZQTzoRsSkQ6cKHpj+G/gWurp/UcHHXFVwgLSPF7c3KHDtiYq7Vqw0
bvmhM03LI6+ZIPRV7hLBr7WP7UmpAiREMF7tTnmzAoGBAIkx3w3WywFQxtblmyeB
qbd7F2IaE23XoxyjX+tBEQ4qQqwcoSE0v8TXHIBEwjceeX+NLVhn9ClJYVniLRq+
oL3VVqVyzB4RleJZCc98e3PV1yyFx/b1Uo3pHOsXX9lKeTjKwV9v0rhFGzPEgP3M
yOvXA8TG0FnM6OLUg/D6GX0JAoGAMuHS4TVOGeV3ahr9mHKYiN5vKNgrzka+VEod
L9rJ/FQOrfADpyCiDen5I5ygsXU+VM3oanyK88NpcVlxOGoMft0M+OYoQVWKE7lO
ZKYhBX6fGqQ7pfUJPXXIOgwfmni5fZ0sm+j63g3bg10OsiumKGxaQJgXhL1+3gQg
Y7ZwibUCgYEAlZoFFvkMLjpOSaHk1z5ZZnt19X0QUIultBwkumSqMPm+Ks7+uDrx
thGUCoz4ecr/ci4bIUY7mB+zfAbqnBOMxreJqCRbAIuRypo1IlWkTp8DywoDOfMW
NfzjVmzJ7EJu44nGmVAi1jw4Pbseivvi1ujMCoPgaE8I1uSh144bwN8=
-----END RSA PRIVATE KEY-----

.ci/jobs/defaults.yml  (Executable file, 74 lines)
@@ -0,0 +1,74 @@
---

##### GLOBAL METADATA

- meta:
    cluster: clients-ci

##### JOB DEFAULTS

- job:
    project-type: matrix
    logrotate:
      daysToKeep: 30
      numToKeep: 100
    parameters:
      - string:
          name: branch_specifier
          default: refs/heads/master
          description: the Git branch specifier to build (<branchName>, <tagName>,
            <commitId>, etc.)
    properties:
      - github:
          url: https://github.com/elastic/eland
      - inject:
          properties-content: HOME=$JENKINS_HOME
    concurrent: true
    node: flyweight
    scm:
      - git:
          name: origin
          credentials-id: f6c7695a-671e-4f4f-a331-acdce44ff9ba
          reference-repo: /var/lib/jenkins/.git-references/eland.git
          branches:
            - ${branch_specifier}
          url: git@github.com:elastic/eland.git
          basedir: ''
          wipe-workspace: 'True'
    triggers:
      - github
    axes:
      - axis:
          type: slave
          name: label
          values:
            - linux
      - axis:
          type: yaml
          filename: .ci/test-matrix.yml
          name: ELASTICSEARCH_VERSION
      - axis:
          type: yaml
          filename: .ci/test-matrix.yml
          name: TEST_SUITE
    yaml-strategy:
      exclude-key: exclude
      filename: .ci/test-matrix.yml
    wrappers:
      - ansicolor
      - timeout:
          type: absolute
          timeout: 120
          fail: true
      - timestamps
      - workspace-cleanup
    builders:
      - shell: |-
          #!/usr/local/bin/runbld
          .ci/run-tests
    publishers:
      - email:
          recipients: infra-root+build@elastic.co
      - junit:
          results: "build/output/*-junit.xml"
          allow-empty-results: true

.ci/jobs/elastic+eland+master.yml  (Executable file, 14 lines)
@@ -0,0 +1,14 @@
---
- job:
    name: elastic+eland+master
    display-name: 'elastic / eland # master'
    description: Eland is a data science client with a Pandas-like interface
    junit_results: "*-junit.xml"
    parameters:
      - string:
          name: branch_specifier
          default: refs/heads/master
          description: The Git branch specifier to build
    triggers:
      - github
      - timed: '@daily'

.ci/jobs/elastic+eland+pull-request.yml  (Normal file, 19 lines)
@@ -0,0 +1,19 @@
---
- job:
    name: elastic+eland+pull-request
    display-name: 'elastic / eland # pull-request'
    description: Testing of eland pull requests.
    scm:
      - git:
          branches:
            - ${ghprbActualCommit}
          refspec: +refs/pull/*:refs/remotes/origin/pr/*
    triggers:
      - github-pull-request:
          org-list:
            - elastic
          allow-whitelist-orgs-as-admins: true
          github-hooks: true
          status-context: clients-ci
          cancel-builds-on-update: true
    publishers: []

.ci/run-elasticsearch.sh  (Executable file, 197 lines)
@@ -0,0 +1,197 @@
#!/usr/bin/env bash
#
# Launch one or more Elasticsearch nodes via the Docker image,
# to form a cluster suitable for running the REST API tests.
#
# Export the ELASTICSEARCH_VERSION variable, eg. 'elasticsearch:8.0.0-SNAPSHOT'.

if [[ -z "$ELASTICSEARCH_VERSION" ]]; then
  echo -e "\033[31;1mERROR:\033[0m Required environment variable [ELASTICSEARCH_VERSION] not set\033[0m"
  exit 1
fi

set -euxo pipefail

SCRIPT_PATH=$(dirname $(realpath -s $0))

moniker=$(echo "$ELASTICSEARCH_VERSION" | tr -C "[:alnum:]" '-')
suffix=rest-test

NODE_NAME=${NODE_NAME-${moniker}node1}
MASTER_NODE_NAME=${MASTER_NODE_NAME-${NODE_NAME}}
CLUSTER_NAME=${CLUSTER_NAME-${moniker}${suffix}}
HTTP_PORT=${HTTP_PORT-9200}

ELASTIC_PASSWORD=${ELASTIC_PASSWORD-changeme}
SSL_CERT=${SSL_CERT-"${SCRIPT_PATH}/certs/testnode.crt"}
SSL_KEY=${SSL_KEY-"${SCRIPT_PATH}/certs/testnode.key"}
SSL_CA=${SSL_CA-"${SCRIPT_PATH}/certs/ca.crt"}
SSL_CA_PEM=${SSL_CA-"${SCRIPT_PATH}/certs/ca.pem"}

DETACH=${DETACH-false}
CLEANUP=${CLEANUP-false}

volume_name=${NODE_NAME}-${suffix}-data
network_default=${moniker}${suffix}
NETWORK_NAME=${NETWORK_NAME-"$network_default"}

set +x

function cleanup_volume {
  if [[ "$(docker volume ls -q -f name=$1)" ]]; then
    echo -e "\033[34;1mINFO:\033[0m Removing volume $1\033[0m"
    (docker volume rm "$1") || true
  fi
}
function container_running {
  if [[ "$(docker ps -q -f name=$1)" ]]; then
    return 0;
  else return 1;
  fi
}
function cleanup_node {
  if container_running "$1"; then
    echo -e "\033[34;1mINFO:\033[0m Removing container $1\033[0m"
    (docker container rm --force --volumes "$1") || true
    cleanup_volume "$1-${suffix}-data"
  fi
}
function cleanup_network {
  if [[ "$(docker network ls -q -f name=$1)" ]]; then
    echo -e "\033[34;1mINFO:\033[0m Removing network $1\033[0m"
    (docker network rm "$1") || true
  fi
}

function cleanup {
  if [[ "$DETACH" != "true" ]] || [[ "$1" == "1" ]]; then
    echo -e "\033[34;1mINFO:\033[0m clean the node and volume on startup (1) OR on exit if not detached\033[0m"
    cleanup_node "$NODE_NAME"
  fi
  if [[ "$DETACH" != "true" ]]; then
    echo -e "\033[34;1mINFO:\033[0m clean the network if not detached (start and exit)\033[0m"
    cleanup_network "$NETWORK_NAME"
  fi
};
trap "cleanup 0" EXIT

if [[ "$CLEANUP" == "true" ]]; then
  trap - EXIT
  if [[ -z "$(docker network ls -q -f name=${NETWORK_NAME})" ]]; then
    echo -e "\033[34;1mINFO:\033[0m $NETWORK_NAME is already deleted\033[0m"
    exit 0
  fi
  containers=$(docker network inspect -f '{{ range $key, $value := .Containers }}{{ printf "%s\n" .Name}}{{ end }}' ${NETWORK_NAME})
  while read -r container; do
    cleanup_node "$container"
  done <<< "$containers"
  cleanup_network "$NETWORK_NAME"
  echo -e "\033[32;1mSUCCESS:\033[0m Cleaned up and exiting\033[0m"
  exit 0
fi

echo -e "\033[34;1mINFO:\033[0m Making sure previous run leftover infrastructure is removed \033[0m"
cleanup 1

echo -e "\033[34;1mINFO:\033[0m Creating network $NETWORK_NAME if it does not exist already \033[0m"
docker network inspect "$NETWORK_NAME" > /dev/null 2>&1 || docker network create "$NETWORK_NAME"

environment=($(cat <<-END
--env node.name=$NODE_NAME
--env cluster.name=$CLUSTER_NAME
--env cluster.initial_master_nodes=$MASTER_NODE_NAME
--env discovery.seed_hosts=$MASTER_NODE_NAME
--env cluster.routing.allocation.disk.threshold_enabled=false
--env bootstrap.memory_lock=true
--env node.attr.testattr=test
--env path.repo=/tmp
--env repositories.url.allowed_urls=http://snapshot.test*
END
))

volumes=($(cat <<-END
--volume $volume_name:/usr/share/elasticsearch/data
END
))

if [[ "$ELASTICSEARCH_VERSION" != *oss* ]]; then
  environment+=($(cat <<-END
--env ELASTIC_PASSWORD=$ELASTIC_PASSWORD
--env xpack.license.self_generated.type=trial
--env xpack.security.enabled=true
--env xpack.security.http.ssl.enabled=true
--env xpack.security.http.ssl.verification_mode=certificate
--env xpack.security.http.ssl.key=certs/testnode.key
--env xpack.security.http.ssl.certificate=certs/testnode.crt
--env xpack.security.http.ssl.certificate_authorities=certs/ca.crt
--env xpack.security.transport.ssl.enabled=true
--env xpack.security.transport.ssl.key=certs/testnode.key
--env xpack.security.transport.ssl.certificate=certs/testnode.crt
--env xpack.security.transport.ssl.certificate_authorities=certs/ca.crt
END
))
  volumes+=($(cat <<-END
--volume $SSL_CERT:/usr/share/elasticsearch/config/certs/testnode.crt
--volume $SSL_KEY:/usr/share/elasticsearch/config/certs/testnode.key
--volume $SSL_CA:/usr/share/elasticsearch/config/certs/ca.crt
--volume $SSL_CA_PEM:/usr/share/elasticsearch/config/certs/ca.pem
END
))
fi

url="http://$NODE_NAME"
if [[ "$ELASTICSEARCH_VERSION" != *oss* ]]; then
  url="https://elastic:$ELASTIC_PASSWORD@$NODE_NAME"
fi

cert_validation_flags="--insecure"
if [[ "$NODE_NAME" == "instance" ]]; then
  cert_validation_flags="--cacert /usr/share/elasticsearch/config/certs/ca.pem --resolve ${NODE_NAME}:443:127.0.0.1"
fi

echo -e "\033[34;1mINFO:\033[0m Starting container $NODE_NAME \033[0m"
set -x
docker run \
  --name "$NODE_NAME" \
  --network "$NETWORK_NAME" \
  --env ES_JAVA_OPTS=-"Xms1g -Xmx1g" \
  "${environment[@]}" \
  "${volumes[@]}" \
  --publish "$HTTP_PORT":9200 \
  --ulimit nofile=65536:65536 \
  --ulimit memlock=-1:-1 \
  --detach="$DETACH" \
  --health-cmd="curl $cert_validation_flags --fail $url:9200/_cluster/health || exit 1" \
  --health-interval=2s \
  --health-retries=20 \
  --health-timeout=2s \
  --rm \
  docker.elastic.co/elasticsearch/"$ELASTICSEARCH_VERSION";
set +x

if [[ "$DETACH" == "true" ]]; then
  until ! container_running "$NODE_NAME" || (container_running "$NODE_NAME" && [[ "$(docker inspect -f "{{.State.Health.Status}}" ${NODE_NAME})" != "starting" ]]); do
    echo ""
    docker inspect -f "{{range .State.Health.Log}}{{.Output}}{{end}}" ${NODE_NAME}
    echo -e "\033[34;1mINFO:\033[0m waiting for node $NODE_NAME to be up\033[0m"
    sleep 2;
  done;

  # Always show logs if the container is running, this is very useful both on CI as well as while developing
  if container_running "$NODE_NAME"; then
    docker logs $NODE_NAME
  fi

  if ! container_running "$NODE_NAME" || [[ "$(docker inspect -f "{{.State.Health.Status}}" ${NODE_NAME})" != "healthy" ]]; then
    cleanup 1
    echo
    echo -e "\033[31;1mERROR:\033[0m Failed to start ${ELASTICSEARCH_VERSION} in detached mode beyond health checks\033[0m"
    echo -e "\033[31;1mERROR:\033[0m dumped the docker log before shutting the node down\033[0m"
    exit 1
  else
    echo
    echo -e "\033[32;1mSUCCESS:\033[0m Detached and healthy: ${NODE_NAME} on docker network: ${NETWORK_NAME}\033[0m"
    echo -e "\033[32;1mSUCCESS:\033[0m Running on: ${url/$NODE_NAME/localhost}:${HTTP_PORT}\033[0m"
    exit 0
  fi
fi
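
For reference, a rough sketch of how this script is driven; the variable names come straight from the script and from .ci/run-tests below, and the version string is only an example:

# start a single detached node on a dedicated network
ELASTICSEARCH_VERSION=elasticsearch:8.0.0-SNAPSHOT \
NODE_NAME=instance \
NETWORK_NAME=elasticsearch \
DETACH=true \
  bash .ci/run-elasticsearch.sh

# later, tear the node, its volume and the network down again
ELASTICSEARCH_VERSION=elasticsearch:8.0.0-SNAPSHOT \
NODE_NAME=instance \
NETWORK_NAME=elasticsearch \
CLEANUP=true \
  bash .ci/run-elasticsearch.sh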

.ci/run-tests  (Executable file, 70 lines)
@@ -0,0 +1,70 @@
#!/usr/bin/env bash

if [[ -z $ELASTICSEARCH_VERSION ]]; then
  echo -e "\033[31;1mERROR:\033[0m Required environment variable [ELASTICSEARCH_VERSION] not set\033[0m"
  exit 1
fi
set -euxo pipefail

TEST_SUITE=${TEST_SUITE-oss}
NODE_NAME=instance

repo=$(pwd)

elasticsearch_image=elasticsearch
elasticsearch_url=https://elastic:changeme@${NODE_NAME}:9200
if [[ $TEST_SUITE != "xpack" ]]; then
  elasticsearch_image=elasticsearch-${TEST_SUITE}
  elasticsearch_url=http://${NODE_NAME}:9200
fi

function cleanup {
  status=$?
  set +x
  ELASTICSEARCH_VERSION=${elasticsearch_image}:${ELASTICSEARCH_VERSION} \
  NODE_NAME=${NODE_NAME} \
  NETWORK_NAME=elasticsearch \
  CLEANUP=true \
  bash ./.ci/run-elasticsearch.sh
  # Report status and exit
  if [[ "$status" == "0" ]]; then
    echo -e "\n\033[32;1mSUCCESS run-tests\033[0m"
    exit 0
  else
    echo -e "\n\033[31;1mFAILURE during run-tests\033[0m"
    exit ${status}
  fi
}
trap cleanup EXIT

echo -e "\033[1m>>>>> Start [$ELASTICSEARCH_VERSION container] >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"

ELASTICSEARCH_VERSION=${elasticsearch_image}:${ELASTICSEARCH_VERSION} \
NODE_NAME=${NODE_NAME} \
NETWORK_NAME=elasticsearch \
DETACH=true \
bash .ci/run-elasticsearch.sh

echo -e "\033[1m>>>>> YOUR STEPS HERE >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"

## Add your steps here
## Use ${elasticsearch_url} to talk to elasticsearch
## declare your matrix variables here as well e.g DOTNET_VERSION=${DOTNET_VERSION-3.0.100}

echo -e "\033[1m>>>>> Build [elastic/eland container] >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"

docker build --file .ci/Dockerfile --tag elastic/eland .

echo -e "\033[1m>>>>> Run [elastic/eland container] >>>>>>>>>>>>>>>>>>>>>>>>>>>>>\033[0m"

docker run \
  --network=elasticsearch \
  --env "ELASTICSEARCH_HOST=${elasticsearch_url}" \
  --env "TEST_SUITE=${TEST_SUITE}" \
  --name eland-test-runner \
  --rm \
  elastic/eland \
  ./run_build.sh
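
A minimal local invocation sketch, assuming Docker is available and picking one cell of the matrix defined in .ci/test-matrix.yml below (the values shown are illustrative):

export ELASTICSEARCH_VERSION=8.0.0-SNAPSHOT
export TEST_SUITE=xpack
./.ci/run-tests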

.ci/test-matrix.yml  (Executable file, 17 lines)
@@ -0,0 +1,17 @@
---

ELASTICSEARCH_VERSION:
  - 8.0.0-SNAPSHOT
  - 7.5-SNAPSHOT

TEST_SUITE:
  - oss
  - xpack

PYTHON_VERSION:
  - 3.7
  - 3.6
  - 3.5.3

exclude: ~

.dockerignore  (Normal file, 3 lines)
@@ -0,0 +1,3 @@
docs/*
example/*
.git

@@ -1,11 +1,22 @@
 import os

+from elasticsearch import Elasticsearch
 import pandas as pd

 ROOT_DIR = os.path.dirname(os.path.abspath(__file__))

 # Define test files and indices
-ELASTICSEARCH_HOST = 'localhost'  # TODO externalise this
+ELASTICSEARCH_HOST = os.environ.get('ELASTICSEARCH_HOST') or 'localhost'

+# Define client to use in tests
+TEST_SUITE = os.environ.get('TEST_SUITE')
+if TEST_SUITE == 'xpack':
+    print('Running xpack tests requires SSL. Setting up SSL enabled client')
+    certpath = os.path.join(os.path.dirname(__file__), '../../.ci/certs/ca.crt')
+    print(certpath)
+    ES_TEST_CLIENT = Elasticsearch(ELASTICSEARCH_HOST, use_ssl=True, verify_certs=True, ca_certs=certpath)
+else:
+    ES_TEST_CLIENT = Elasticsearch(ELASTICSEARCH_HOST)

 FLIGHTS_INDEX_NAME = 'flights'
 FLIGHTS_MAPPING = {"mappings": {

@@ -8,7 +8,7 @@ import eland as ed
 ROOT_DIR = os.path.dirname(os.path.abspath(__file__))

 # Create pandas and eland data frames
-from eland.tests import ELASTICSEARCH_HOST
+from eland.tests import ES_TEST_CLIENT
 from eland.tests import FLIGHTS_DF_FILE_NAME, FLIGHTS_INDEX_NAME, \
     FLIGHTS_SMALL_INDEX_NAME, \
     ECOMMERCE_DF_FILE_NAME, ECOMMERCE_INDEX_NAME
@@ -17,10 +17,10 @@ _pd_flights = pd.read_json(FLIGHTS_DF_FILE_NAME).sort_index()
 _pd_flights['timestamp'] = \
     pd.to_datetime(_pd_flights['timestamp'])
 _pd_flights.index = _pd_flights.index.map(str)  # make index 'object' not int
-_ed_flights = ed.read_es(ELASTICSEARCH_HOST, FLIGHTS_INDEX_NAME)
+_ed_flights = ed.read_es(ES_TEST_CLIENT, FLIGHTS_INDEX_NAME)

 _pd_flights_small = _pd_flights.head(48)
-_ed_flights_small = ed.read_es(ELASTICSEARCH_HOST, FLIGHTS_SMALL_INDEX_NAME)
+_ed_flights_small = ed.read_es(ES_TEST_CLIENT, FLIGHTS_SMALL_INDEX_NAME)

 _pd_ecommerce = pd.read_json(ECOMMERCE_DF_FILE_NAME).sort_index()
 _pd_ecommerce['order_date'] = \
@@ -30,7 +30,7 @@ _pd_ecommerce['products.created_on'] = \
 _pd_ecommerce.insert(2, 'customer_birth_date', None)
 _pd_ecommerce.index = _pd_ecommerce.index.map(str)  # make index 'object' not int
 _pd_ecommerce['customer_birth_date'].astype('datetime64')
-_ed_ecommerce = ed.read_es(ELASTICSEARCH_HOST, ECOMMERCE_INDEX_NAME)
+_ed_ecommerce = ed.read_es(ES_TEST_CLIENT, ECOMMERCE_INDEX_NAME)


 class TestData:

@@ -4,7 +4,7 @@ import numpy as np
 import pandas as pd

 import eland as ed
-from eland.tests.common import ELASTICSEARCH_HOST
+from eland.tests.common import ES_TEST_CLIENT
 from eland.tests.common import TestData
 from eland.tests.common import assert_pandas_eland_frame_equal

@@ -37,7 +37,7 @@ class TestDataFrameDateTime(TestData):
         # Now create index
         index_name = 'eland_test_generate_es_mappings'

-        ed_df = ed.pandas_to_eland(df, ELASTICSEARCH_HOST, index_name, if_exists="replace", refresh=True)
+        ed_df = ed.pandas_to_eland(df, ES_TEST_CLIENT, index_name, if_exists="replace", refresh=True)
         ed_df_head = ed_df.head()

         assert_pandas_eland_frame_equal(df, ed_df_head)

@@ -3,7 +3,7 @@
 import pytest

 import eland as ed
-from eland.tests import ELASTICSEARCH_HOST
+from eland.tests import ES_TEST_CLIENT
 from eland.tests import FLIGHTS_INDEX_NAME


@@ -16,15 +16,15 @@ class TestDataFrameInit:

         # Construct invalid DataFrame (throws)
         with pytest.raises(ValueError):
-            df = ed.DataFrame(client=ELASTICSEARCH_HOST)
+            df = ed.DataFrame(client=ES_TEST_CLIENT)

         # Construct invalid DataFrame (throws)
         with pytest.raises(ValueError):
             df = ed.DataFrame(index_pattern=FLIGHTS_INDEX_NAME)

         # Good constructors
-        df0 = ed.DataFrame(ELASTICSEARCH_HOST, FLIGHTS_INDEX_NAME)
-        df1 = ed.DataFrame(client=ELASTICSEARCH_HOST, index_pattern=FLIGHTS_INDEX_NAME)
+        df0 = ed.DataFrame(ES_TEST_CLIENT, FLIGHTS_INDEX_NAME)
+        df1 = ed.DataFrame(client=ES_TEST_CLIENT, index_pattern=FLIGHTS_INDEX_NAME)

-        qc = ed.ElandQueryCompiler(client=ELASTICSEARCH_HOST, index_pattern=FLIGHTS_INDEX_NAME)
+        qc = ed.ElandQueryCompiler(client=ES_TEST_CLIENT, index_pattern=FLIGHTS_INDEX_NAME)
         df2 = ed.DataFrame(query_compiler=qc)

@@ -4,7 +4,7 @@ import pandas as pd
 from elasticsearch import Elasticsearch

 import eland as ed
-from eland.tests.common import ELASTICSEARCH_HOST
+from eland.tests.common import ES_TEST_CLIENT
 from eland.tests.common import TestData
 from eland.tests.common import assert_pandas_eland_frame_equal

@@ -25,13 +25,10 @@ class TestDataFrameQuery(TestData):
         3  4  4  7
         4  5  2  6
         """
-
-        es = Elasticsearch(ELASTICSEARCH_HOST)
-
         # Now create index
         index_name = 'eland_test_query'

-        ed_df = ed.pandas_to_eland(pd_df, es, index_name, if_exists="replace", refresh=True)
+        ed_df = ed.pandas_to_eland(pd_df, ES_TEST_CLIENT, index_name, if_exists="replace", refresh=True)

         assert_pandas_eland_frame_equal(pd_df, ed_df)

@@ -55,7 +52,7 @@

         assert_pandas_eland_frame_equal(pd_q4, ed_q4)

-        es.indices.delete(index_name)
+        ES_TEST_CLIENT.indices.delete(index_name)

     def test_simple_query(self):
         ed_flights = self.ed_flights()
@@ -84,12 +81,10 @@ class TestDataFrameQuery(TestData):
         3  4  4  7
         4  5  2  6
         """
-        es = Elasticsearch(ELASTICSEARCH_HOST)
-
         # Now create index
         index_name = 'eland_test_query'

-        ed_df = ed.pandas_to_eland(pd_df, es, index_name, if_exists="replace", refresh=True)
+        ed_df = ed.pandas_to_eland(pd_df, ES_TEST_CLIENT, index_name, if_exists="replace", refresh=True)

         assert_pandas_eland_frame_equal(pd_df, ed_df)

@@ -119,4 +114,4 @@ class TestDataFrameQuery(TestData):

         assert_pandas_eland_frame_equal(pd_q4, ed_q4)

-        #es.indices.delete(index_name)
+        ES_TEST_CLIENT.indices.delete(index_name)
@@ -8,7 +8,7 @@ from elasticsearch import Elasticsearch
 from pandas.util.testing import assert_frame_equal

 import eland as ed
-from eland.tests import ELASTICSEARCH_HOST
+from eland.tests import ES_TEST_CLIENT
 from eland.tests import FLIGHTS_INDEX_NAME
 from eland.tests.common import ROOT_DIR
 from eland.tests.common import TestData
@@ -52,9 +52,8 @@ class TestDataFrameToCSV(TestData):
         now_millis = int(round(time.time() * 1000))

         test_index = FLIGHTS_INDEX_NAME + '.' + str(now_millis)
-        es = Elasticsearch(ELASTICSEARCH_HOST)

-        ed_flights_from_csv = ed.read_csv(results_file, es, test_index, index_col=0, es_refresh=True,
+        ed_flights_from_csv = ed.read_csv(results_file, ES_TEST_CLIENT, test_index, index_col=0, es_refresh=True,
                                           es_geo_points=['OriginLocation', 'DestLocation'],
                                           converters={
                                               'DestLocation': lambda x: ast.literal_eval(x),

@@ -4,7 +4,7 @@ import numpy as np
 import pandas as pd

 import eland as ed
-from eland.tests.common import ELASTICSEARCH_HOST, assert_pandas_eland_frame_equal
+from eland.tests.common import ES_TEST_CLIENT, assert_pandas_eland_frame_equal
 from eland.tests.common import TestData


@@ -36,7 +36,7 @@ class TestDataFrameUtils(TestData):
         # Now create index
         index_name = 'eland_test_generate_es_mappings'

-        ed_df = ed.pandas_to_eland(df, ELASTICSEARCH_HOST, index_name, if_exists="replace", refresh=True)
+        ed_df = ed.pandas_to_eland(df, ES_TEST_CLIENT, index_name, if_exists="replace", refresh=True)
         ed_df_head = ed_df.head()

         assert_pandas_eland_frame_equal(df, ed_df_head)

@@ -1,6 +1,6 @@
 # File called _pytest for PyCharm compatibility
 import eland as ed
-from eland.tests import ELASTICSEARCH_HOST
+from eland.tests import ES_TEST_CLIENT
 from eland.tests import FLIGHTS_INDEX_NAME
 from eland.tests.common import TestData
 from eland.tests.common import assert_pandas_eland_series_equal
@@ -10,7 +10,7 @@ class TestSeriesHeadTail(TestData):

     def test_head_tail(self):
         pd_s = self.pd_flights()['Carrier']
-        ed_s = ed.Series(ELASTICSEARCH_HOST, FLIGHTS_INDEX_NAME, 'Carrier')
+        ed_s = ed.Series(ES_TEST_CLIENT, FLIGHTS_INDEX_NAME, 'Carrier')

         pd_s_head = pd_s.head(10)
         ed_s_head = ed_s.head(10)

@@ -1,6 +1,6 @@
 # File called _pytest for PyCharm compatibility
 import eland as ed
-from eland.tests import ELASTICSEARCH_HOST
+from eland.tests import ES_TEST_CLIENT
 from eland.tests import FLIGHTS_INDEX_NAME
 from eland.tests.common import TestData
 from eland.tests.common import assert_pandas_eland_series_equal
@@ -11,7 +11,7 @@ class TestSeriesName(TestData):
     def test_name(self):
         # deep copy pandas DataFrame as .name alters this reference frame
         pd_series = self.pd_flights()['Carrier'].copy(deep=True)
-        ed_series = ed.Series(ELASTICSEARCH_HOST, FLIGHTS_INDEX_NAME, 'Carrier')
+        ed_series = ed.Series(ES_TEST_CLIENT, FLIGHTS_INDEX_NAME, 'Carrier')

         assert_pandas_eland_series_equal(pd_series, ed_series)
         assert ed_series.name == pd_series.name

@@ -1,6 +1,6 @@
 # File called _pytest for PyCharm compatibility
 import eland as ed
-from eland.tests import ELASTICSEARCH_HOST
+from eland.tests import ES_TEST_CLIENT
 from eland.tests import FLIGHTS_INDEX_NAME
 from eland.tests.common import TestData
 from eland.tests.common import assert_pandas_eland_series_equal
@@ -10,7 +10,7 @@ class TestSeriesRename(TestData):

     def test_rename(self):
         pd_carrier = self.pd_flights()['Carrier']
-        ed_carrier = ed.Series(ELASTICSEARCH_HOST, FLIGHTS_INDEX_NAME, 'Carrier')
+        ed_carrier = ed.Series(ES_TEST_CLIENT, FLIGHTS_INDEX_NAME, 'Carrier')

         assert_pandas_eland_series_equal(pd_carrier, ed_carrier)

@@ -1,6 +1,6 @@
 # File called _pytest for PyCharm compatibility
 import eland as ed
-from eland.tests import ELASTICSEARCH_HOST
+from eland.tests import ES_TEST_CLIENT
 from eland.tests import FLIGHTS_INDEX_NAME
 from eland.tests.common import TestData

@@ -9,7 +9,7 @@ class TestSeriesRepr(TestData):

     def test_repr_flights_carrier(self):
         pd_s = self.pd_flights()['Carrier']
-        ed_s = ed.Series(ELASTICSEARCH_HOST, FLIGHTS_INDEX_NAME, 'Carrier')
+        ed_s = ed.Series(ES_TEST_CLIENT, FLIGHTS_INDEX_NAME, 'Carrier')

         pd_repr = repr(pd_s)
         ed_repr = repr(ed_s)
@@ -18,7 +18,7 @@

     def test_repr_flights_carrier_5(self):
         pd_s = self.pd_flights()['Carrier'].head(5)
-        ed_s = ed.Series(ELASTICSEARCH_HOST, FLIGHTS_INDEX_NAME, 'Carrier').head(5)
+        ed_s = ed.Series(ES_TEST_CLIENT, FLIGHTS_INDEX_NAME, 'Carrier').head(5)

         pd_repr = repr(pd_s)
         ed_repr = repr(ed_s)

@@ -1,5 +1,6 @@
 from elasticsearch import Elasticsearch
 from elasticsearch import helpers
+from elasticsearch.client import ClusterClient

 from eland.tests import *

@@ -50,6 +51,16 @@ def _setup_data(es):

         print("Done", index_name)

+def _update_max_compilations_limit(es, limit="10000/1m"):
+    print('Updating script.max_compilations_rate to ', limit)
+    cluster_client = ClusterClient(es)
+    body = {
+        "transient" : {
+            "script.max_compilations_rate" : limit
+        }
+    }
+    cluster_client.put_settings(body=body)
+

 def _setup_test_mappings(es):
     # Create a complex mapping containing many Elasticsearch features
@@ -66,8 +77,11 @@ def _setup_test_nested(es):

 if __name__ == '__main__':
     # Create connection to Elasticsearch - use defaults
-    es = Elasticsearch(ELASTICSEARCH_HOST)
+    print('Connecting to ES', ELASTICSEARCH_HOST)
+    es = ES_TEST_CLIENT

     _setup_data(es)
     _setup_test_mappings(es)
     _setup_test_nested(es)
+    _update_max_compilations_limit(es)

run_build.sh  (Executable file, 4 lines)
@@ -0,0 +1,4 @@
#!/usr/bin/env bash

python -m eland.tests.setup_tests
pytest