mirror of https://github.com/elastic/eland.git
synced 2025-07-11 00:02:14 +08:00

[docs] Migrate docs from AsciiDoc to Markdown (#762)
Co-authored-by: István Zoltán Szabó <szabosteve@gmail.com>
This commit is contained in:
parent 6692251d9e
commit ca64672fd7

485  docs/docset.yml  (new file)
@@ -0,0 +1,485 @@
project: 'Eland Python client'

cross_links:
  - docs-content

toc:
  - toc: reference

subs:
  # Shared Elastic docs URL variables (a sampling)
  ref: "https://www.elastic.co/guide/en/elasticsearch/reference/current"
  ml-docs: "https://www.elastic.co/guide/en/machine-learning/current"
  eland-docs: "https://www.elastic.co/guide/en/elasticsearch/client/eland/current"
  es-python-client: "https://www.elastic.co/guide/en/elasticsearch/client/python-api/current"
  kibana-ref: "https://www.elastic.co/guide/en/kibana/current"
  cloud: "https://www.elastic.co/guide/en/cloud/current"
  # Product-, feature-, and app-name substitutions (a sampling)
  es: "Elasticsearch"
  kib: "Kibana"
  stack: "Elastic Stack"
  ecloud: "Elastic Cloud"
  ess: "Elasticsearch Service"
  ml: "machine learning"
  ml-cap: "Machine learning"
  dfanalytics: "data frame analytics"
  nlp: "natural language processing"
  # … the remaining entries (roughly 460 of them) follow the same two patterns:
  # per-guide URL prefixes (Beats, APM agents, ECS logging, Enterprise Search,
  # Elastic Cloud, GitHub repo/issue/pull links, and so on) and standard names
  # and labels (products, Kibana apps, ML and transform terminology, API
  # section titles).
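These `subs` are what the migrated Markdown pages reference with double-brace placeholders. For example, the overview page below writes `Eland is a Python client and toolkit for DataFrames and {{ml}} in {{es}}.`, which the docs build renders with `{{ml}}` expanded to "machine learning" and `{{es}}` to "Elasticsearch", exactly as mapped above.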
@@ -1,14 +0,0 @@  (deleted AsciiDoc book index)
= Eland Python Client

:doctype: book

include::{asciidoc-dir}/../../shared/versions/stack/{source_branch}.asciidoc[]
include::{asciidoc-dir}/../../shared/attributes.asciidoc[]

include::overview.asciidoc[]

include::installation.asciidoc[]

include::dataframes.asciidoc[]

include::machine-learning.asciidoc[]
@@ -1,16 +0,0 @@  (deleted AsciiDoc installation page)
[[installation]]
== Installation

Eland can be installed with https://pip.pypa.io[pip] from https://pypi.org/project/eland[PyPI]. We recommend https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/[using a virtual environment] when installing with pip:

[source,sh]
-----------------------------
$ python -m pip install eland
-----------------------------

Alternatively, Eland can be installed with https://docs.conda.io[Conda] from https://anaconda.org/conda-forge/eland[Conda Forge]:

[source,sh]
------------------------------------
$ conda install -c conda-forge eland
------------------------------------
@@ -1,242 +0,0 @@  (deleted AsciiDoc machine learning page; its sections — Trained models, NLP with PyTorch, Docker import, air-gapped installs, proxy configuration, authentication methods, and TLS/SSL — carry over with the same content, reformatted as Markdown, in docs/reference/machine-learning.md below)
@@ -1,16 +1,16 @@  (dataframes page, AsciiDoc → Markdown; the new Markdown version is shown)
---
mapped_pages:
  - https://www.elastic.co/guide/en/elasticsearch/client/eland/current/dataframes.html
---

# Data Frames [dataframes]

`eland.DataFrame` wraps an Elasticsearch index in a Pandas-like API and defers all processing and filtering of data to Elasticsearch instead of your local machine. This means you can process large amounts of data within Elasticsearch from a Jupyter Notebook without overloading your machine.

```python
>>> import eland as ed
>>>
# Connect to 'flights' index via localhost Elasticsearch node
>>> df = ed.DataFrame('http://localhost:9200', 'flights')

# eland.DataFrame instance has the same API as pandas.DataFrame
@@ -29,14 +29,14 @@ without overloading your machine.
<class 'eland.dataframe.DataFrame'>
Index: 13059 entries, 0 to 13058
Data columns (total 27 columns):
 #   Column          Non-Null Count  Dtype
---  ------          --------------  -----
 0   AvgTicketPrice  13059 non-null  float64
 1   Cancelled       13059 non-null  bool
 2   Carrier         13059 non-null  object
...
 24  OriginWeather   13059 non-null  object
 25  dayOfWeek       13059 non-null  int64
 26  timestamp       13059 non-null  datetime64[ns]
dtypes: bool(2), datetime64[ns](1), float64(5), int64(2), object(17)
memory usage: 80.0 bytes
@@ -59,4 +59,5 @@ Elasticsearch storage usage: 5.043 MB
sum    9.261629e+07  8.204365e+06
min    0.000000e+00  1.000205e+02
std    4.578263e+03  2.663867e+02
```
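Because the Eland frame only defers work to Elasticsearch, pulling results into local memory is an explicit step. A minimal sketch, assuming the same local `flights` index as above and that only a small slice is wanted as a real `pandas.DataFrame` (the filter and column choice are illustrative):

```python
import eland as ed

# Lazily evaluated Eland frame backed by the 'flights' index
df = ed.DataFrame("http://localhost:9200", "flights")

# Filtering and column selection are translated to Elasticsearch queries
cheap = df[df.AvgTicketPrice < 200][["Carrier", "AvgTicketPrice"]]

# eland_to_pandas materializes the (small) result as an in-memory pandas.DataFrame
cheap_pd = ed.eland_to_pandas(cheap.head(10))
print(cheap_pd.shape)
```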
@@ -1,33 +1,36 @@  (overview page, AsciiDoc → Markdown; the new Markdown version is shown)
---
mapped_pages:
  - https://www.elastic.co/guide/en/elasticsearch/client/eland/current/index.html
  - https://www.elastic.co/guide/en/elasticsearch/client/eland/current/overview.html
navigation_title: Eland
---

# Eland Python client [overview]

Eland is a Python client and toolkit for DataFrames and {{ml}} in {{es}}. Full documentation is available on [Read the Docs](https://eland.readthedocs.io). Source code is available on [GitHub](https://github.com/elastic/eland).


## Compatibility [_compatibility]

* Supports Python 3.9+ and Pandas 1.5
* Supports {{es}} 8+ clusters; 8.16 or later is recommended for all features to work. Make sure your Eland major version matches the major version of your Elasticsearch cluster.

The recommended way to set your requirements in your `setup.py` or `requirements.txt` is:

```
# Elasticsearch 8.x
eland>=8,<9
```

```
# Elasticsearch 7.x
eland>=7,<8
```


## Getting Started [_getting_started]

Create a `DataFrame` object connected to an {{es}} cluster running on `http://localhost:9200`:

```python
>>> import eland as ed
>>> df = ed.DataFrame(
...    es_client="http://localhost:9200",
@@ -48,15 +51,14 @@
13058     858.144337      False  ...          6 2018-02-11 14:54:34

[13059 rows x 27 columns]
```


### Elastic Cloud [_elastic_cloud]

You can also connect Eland to an Elasticsearch instance in Elastic Cloud:

```python
>>> import eland as ed
>>> from elasticsearch import Elasticsearch
@@ -73,16 +75,16 @@ You can also connect Eland to an Elasticsearch instance in Elastic Cloud:
3       181.694216   True  ...         0 2018-01-01 10:33:28
4       730.041778  False  ...         0 2018-01-01 05:13:00
[5 rows x 27 columns]
```

Eland can be used for complex queries and aggregations:

```python
>>> df[df.Carrier != "Kibana Airlines"].groupby("Carrier").mean(numeric_only=False)
                  AvgTicketPrice  Cancelled                     timestamp
Carrier
ES-Air                630.235816   0.129814 2018-01-21 20:45:00.200000000
JetBeats              627.457373   0.134698 2018-01-21 14:43:18.112400635
Logstash Airways      624.581974   0.125188 2018-01-21 16:14:50.711798340
```
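The diff shows only the first lines of the Elastic Cloud example above. As an illustrative sketch of the same connection pattern (not the elided lines themselves; the `cloud_id`, `api_key`, and index name are placeholders):

```python
import eland as ed
from elasticsearch import Elasticsearch

# Placeholder credentials for an Elastic Cloud deployment
es = Elasticsearch(
    cloud_id="<deployment-name>:<base64-data>",
    api_key="<api-key>",
)

# Any configured elasticsearch-py client can be passed as es_client
df = ed.DataFrame(es_client=es, es_index_pattern="flights")
print(df.head(5))
```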
19  docs/reference/installation.md  (new file)
@@ -0,0 +1,19 @@
---
mapped_pages:
  - https://www.elastic.co/guide/en/elasticsearch/client/eland/current/installation.html
---

# Installation [installation]

Eland can be installed with [pip](https://pip.pypa.io) from [PyPI](https://pypi.org/project/eland). We recommend [using a virtual environment](https://packaging.python.org/en/latest/guides/installing-using-pip-and-virtual-environments/) when installing with pip:

```sh
$ python -m pip install eland
```

Alternatively, Eland can be installed with [Conda](https://docs.conda.io) from [Conda Forge](https://anaconda.org/conda-forge/eland):

```sh
$ conda install -c conda-forge eland
```
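Either route puts the `eland` package and its import scripts on your path. A quick, optional sanity check (a sketch, assuming a standard install that exposes a version attribute):

```python
# Confirm the package imports and report its version
import eland as ed
print(ed.__version__)
```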
197  docs/reference/machine-learning.md  (new file)
@@ -0,0 +1,197 @@
---
mapped_pages:
  - https://www.elastic.co/guide/en/elasticsearch/client/eland/current/machine-learning.html
---

# Machine Learning [machine-learning]


## Trained models [ml-trained-models]

Eland lets you serialize trained models from the scikit-learn, XGBoost, and LightGBM libraries and use them as inference models in {{es}}.

```python
>>> from xgboost import XGBClassifier
>>> from eland.ml import MLModel

# Train and exercise an XGBoost ML model locally
>>> xgb_model = XGBClassifier(booster="gbtree")
>>> xgb_model.fit(training_data[0], training_data[1])

>>> xgb_model.predict(training_data[0])
[0 1 1 0 1 0 0 0 1 0]

# Import the model into Elasticsearch
>>> es_model = MLModel.import_model(
    es_client="http://localhost:9200",
    model_id="xgb-classifier",
    model=xgb_model,
    feature_names=["f0", "f1", "f2", "f3", "f4"],
)

# Exercise the ML model in Elasticsearch with the training data
>>> es_model.predict(training_data[0])
[0 1 1 0 1 0 0 0 1 0]
```
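The snippet above assumes a `training_data` tuple of features and labels that matches the five `feature_names`. A minimal sketch of producing such a tuple with scikit-learn (illustrative only; any five-feature binary classification dataset works):

```python
from sklearn.datasets import make_classification
from xgboost import XGBClassifier

# Synthetic binary-classification data with five features, matching ["f0", ..., "f4"]
X, y = make_classification(n_samples=100, n_features=5, n_informative=3, random_state=0)
training_data = (X, y)

xgb_model = XGBClassifier(booster="gbtree")
xgb_model.fit(training_data[0], training_data[1])
# ...then import with MLModel.import_model exactly as shown above
```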
## Natural language processing (NLP) with PyTorch [ml-nlp-pytorch]

::::{important}
You need to install the appropriate version of PyTorch to import an NLP model. Run `python -m pip install 'eland[pytorch]'` to install that version.
::::

For NLP tasks, Eland enables you to import PyTorch models into {{es}}. Use the `eland_import_hub_model` script to download and install supported [transformer models](https://huggingface.co/transformers) from the [Hugging Face model hub](https://huggingface.co/models). For example:

```bash
$ eland_import_hub_model <authentication> \ <1>
    --url http://localhost:9200/ \ <2>
    --hub-model-id elastic/distilbert-base-cased-finetuned-conll03-english \ <3>
    --task-type ner \ <4>
    --start
```

1. Use an authentication method to access your cluster. Refer to [Authentication methods](machine-learning.md#ml-nlp-pytorch-auth).
2. The cluster URL. Alternatively, use `--cloud-id`.
3. Specify the identifier for the model in the Hugging Face model hub.
4. Specify the type of NLP task. Supported values are `fill_mask`, `ner`, `question_answering`, `text_classification`, `text_embedding`, `text_expansion`, `text_similarity`, and `zero_shot_classification`.

For more information about the available options, run `eland_import_hub_model` with the `--help` option.

```bash
$ eland_import_hub_model --help
```
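Once the import script finishes (and, with `--start`, has started the deployment), one way to confirm the model landed in the cluster is through the Python client's ML APIs. A sketch, assuming the default derived model ID for the example above and a local unsecured cluster:

```python
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")

# Hub IDs become Elasticsearch model IDs with path delimiters replaced by "__"
model_id = "elastic__distilbert-base-cased-finetuned-conll03-english"
resp = es.ml.get_trained_models(model_id=model_id)
print(resp["trained_model_configs"][0]["model_id"])
```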
### Import model with Docker [ml-nlp-pytorch-docker]

::::{important}
To use the Docker container, you need to clone the Eland repository: [https://github.com/elastic/eland](https://github.com/elastic/eland)
::::

If you want to use Eland without installing it, you can use the Docker image.

You can run the container interactively:

```bash
$ docker run -it --rm --network host docker.elastic.co/eland/eland
```

Running installed scripts is also possible without an interactive shell, for example:

```bash
docker run -it --rm docker.elastic.co/eland/eland \
    eland_import_hub_model \
      --url $ELASTICSEARCH_URL \
      --hub-model-id elastic/distilbert-base-uncased-finetuned-conll03-english \
      --start
```

Replace `$ELASTICSEARCH_URL` with the URL of your Elasticsearch cluster. For authentication purposes, include an administrator username and password in the URL in the following format: `https://username:password@host:port`.


### Install models in an air-gapped environment [ml-nlp-pytorch-air-gapped]

You can install models in a restricted or closed network by pointing the `eland_import_hub_model` script to local files.

For an offline install of a Hugging Face model, the model first needs to be cloned locally; Git and [Git Large File Storage](https://git-lfs.com/) must be installed on your system.

1. Select a model you want to use from Hugging Face. Refer to the [compatible third party model](docs-content://explore-analyze/machine-learning/nlp/ml-nlp-model-ref.md) list for more information on the supported architectures.
2. Clone the selected model from Hugging Face by using the model URL. For example:

    ```bash
    git clone https://huggingface.co/dslim/bert-base-NER
    ```

    This command results in a local copy of the model in the directory `bert-base-NER`.

3. Use the `eland_import_hub_model` script with `--hub-model-id` set to the directory of the cloned model to install it:

    ```bash
    eland_import_hub_model \
      --url 'XXXX' \
      --hub-model-id /PATH/TO/MODEL \
      --task-type ner \
      --es-username elastic --es-password XXX \
      --es-model-id bert-base-ner
    ```

If you use the Docker image to run `eland_import_hub_model`, you must bind-mount the model directory so the container can read the files:

```bash
docker run --mount type=bind,source=/PATH/TO/MODEL,destination=/model,readonly -it --rm docker.elastic.co/eland/eland \
    eland_import_hub_model \
      --url 'XXXX' \
      --hub-model-id /model \
      --task-type ner \
      --es-username elastic --es-password XXX \
      --es-model-id bert-base-ner
```

Once it's uploaded to {{es}}, the model will have the ID specified by `--es-model-id`. If it is not set, the model ID is derived from `--hub-model-id`; spaces and path delimiters are converted to double underscores `__`.
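That derivation is easy to reproduce when you need to predict the resulting ID ahead of time. A small illustrative sketch of the stated rule (not Eland's internal code; it assumes both `/` and `\` count as path delimiters):

```python
def derived_es_model_id(hub_model_id: str) -> str:
    # Spaces and path delimiters become double underscores, per the rule above
    for ch in (" ", "/", "\\"):
        hub_model_id = hub_model_id.replace(ch, "__")
    return hub_model_id

print(derived_es_model_id("elastic/distilbert-base-cased-finetuned-conll03-english"))
# elastic__distilbert-base-cased-finetuned-conll03-english
```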
### Connect to Elasticsearch through a proxy [ml-nlp-pytorch-proxy]

Behind the scenes, Eland uses the `requests` Python library, which [allows configuring proxies through an environment variable](https://requests.readthedocs.io/en/latest/user/advanced/#proxies). For example, to use an HTTP proxy to connect to an HTTPS Elasticsearch cluster, you need to set the `HTTPS_PROXY` environment variable when invoking Eland:

```bash
HTTPS_PROXY=http://proxy-host:proxy-port eland_import_hub_model ...
```

If you disabled security on your Elasticsearch cluster, you should use `HTTP_PROXY` instead.


### Authentication methods [ml-nlp-pytorch-auth]

The following authentication options are available when using the import script:

* Elasticsearch username and password authentication (specified with the `-u` and `-p` options):

    ```bash
    eland_import_hub_model -u <username> -p <password> --cloud-id <cloud-id> ...
    ```

    These `-u` and `-p` options also work when you use `--url`.

* Elasticsearch username and password authentication (embedded in the URL):

    ```bash
    eland_import_hub_model --url https://<user>:<password>@<hostname>:<port> ...
    ```

* Elasticsearch API key authentication:

    ```bash
    eland_import_hub_model --es-api-key <api-key> --url https://<hostname>:<port> ...
    ```

* Hugging Face Hub access token (for private models):

    ```bash
    eland_import_hub_model --hub-access-token <access-token> ...
    ```


### TLS/SSL [ml-nlp-pytorch-tls]

The following TLS/SSL options for Elasticsearch are available when using the import script:

* Specify an alternate CA bundle to verify the cluster certificate:

    ```bash
    eland_import_hub_model --ca-certs CA_CERTS ...
    ```

* Disable TLS/SSL verification altogether (strongly discouraged):

    ```bash
    eland_import_hub_model --insecure ...
    ```
6  docs/reference/toc.yml  (new file)
@@ -0,0 +1,6 @@
project: 'Eland reference'
toc:
  - file: index.md
  - file: installation.md
  - file: dataframes.md
  - file: machine-learning.md