Mirror of https://github.com/valitydev/opendistro-security-advanced-modules.git (synced 2024-11-06 01:05:18 +00:00)

Commit 99f8ebbb2e: Open Distro for Elasticsearch Security Advanced Modules initial release
.gitignore (vendored, normal file, 32 lines added)
@@ -0,0 +1,32 @@
target/
test-output/

/build.gradle
*.log
.externalToolBuilders
maven-eclipse.xml

## eclipse ignores (use 'mvn eclipse:eclipse' to build eclipse projects)
## The only configuration files which are not ignored are certain files in
## .settings (as listed below) since these files ensure common coding
## style across Eclipse and IDEA.
## Other files (.project, .classpath) should be generated through Maven which
## will correctly set the classpath based on the declared dependencies.
.project
.classpath
eclipse-build
*/.project
*/.classpath
*/eclipse-build
/.settings/
!/.settings/org.eclipse.core.resources.prefs
!/.settings/org.eclipse.jdt.core.prefs
!/.settings/org.eclipse.jdt.ui.prefs
!/.settings/org.eclipse.jdt.groovy.core.prefs
bin
elasticsearch-*/
.DS_Store
data/
puppet/.vagrant
test.sh
.vagrant/
CODE_OF_CONDUCT.md (normal file, 4 lines added)
@@ -0,0 +1,4 @@
## Code of Conduct
This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
opensource-codeofconduct@amazon.com with any additional questions or comments.
CONTRIBUTING.md (normal file, 61 lines added)
@ -0,0 +1,61 @@
|
||||
# Contributing Guidelines
|
||||
|
||||
Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
|
||||
documentation, we greatly value feedback and contributions from our community.
|
||||
|
||||
Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
|
||||
information to effectively respond to your bug report or contribution.
|
||||
|
||||
|
||||
## Reporting Bugs/Feature Requests
|
||||
|
||||
We welcome you to use the GitHub issue tracker to report bugs or suggest features.
|
||||
|
||||
When filing an issue, please check [existing open](https://github.com/OpenDistro/elasticsearch-security-advanced-modules/issues), or [recently closed](https://github.com/OpenDistro/elasticsearch-security-advanced-modules/issues?utf8=%E2%9C%93&q=is%3Aissue%20is%3Aclosed%20), issues to make sure somebody else hasn't already
|
||||
reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
|
||||
|
||||
* A reproducible test case or series of steps
|
||||
* The version of our code being used
|
||||
* Any modifications you've made relevant to the bug
|
||||
* Anything unusual about your environment or deployment
|
||||
|
||||
|
||||
## Contributing via Pull Requests
|
||||
Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
|
||||
|
||||
1. You are working against the latest source on the *master* branch.
|
||||
2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
|
||||
3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
|
||||
|
||||
To send us a pull request, please:
|
||||
|
||||
1. Fork the repository.
|
||||
2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
|
||||
3. Ensure local tests pass.
|
||||
4. Commit to your fork using clear commit messages.
|
||||
5. Send us a pull request, answering any default questions in the pull request interface.
|
||||
6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
|
||||
|
||||
GitHub provides additional document on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
|
||||
[creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
|
||||
|
||||
|
||||
## Finding contributions to work on
|
||||
Looking at the existing issues is a great way to find something to contribute on. As our projects, by default, use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), looking at any ['help wanted'](https://github.com/OpenDistro/elasticsearch-security-advanced-modules/labels/help%20wanted) issues is a great place to start.
|
||||
|
||||
|
||||
## Code of Conduct
|
||||
This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct).
|
||||
For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact
|
||||
opensource-codeofconduct@amazon.com with any additional questions or comments.
|
||||
|
||||
|
||||
## Security issue notifications
|
||||
If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/). Please do **not** create a public github issue.
|
||||
|
||||
|
||||
## Licensing
|
||||
|
||||
See the [LICENSE](https://github.com/OpenDistro/security-advanced-modules/blob/master/LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
|
||||
|
||||
We may ask you to sign a [Contributor License Agreement (CLA)](http://en.wikipedia.org/wiki/Contributor_License_Agreement) for larger changes.
|
LICENSE (executable file, 202 lines added)
@@ -0,0 +1,202 @@

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
NOTICE.txt (normal file, 9 lines added)
@@ -0,0 +1,9 @@
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.

This product includes software developed by The Apache Software
Foundation (http://www.apache.org/).

This product includes software developed by The Legion of the Bouncy Castle Inc.
(http://www.bouncycastle.org)

See THIRD-PARTY.txt for additional third party licenses used by this product.
README.md (executable file, 33 lines added)
@@ -0,0 +1,33 @@
# Open Distro For Elasticsearch Security Advanced Modules

## About this module
This module for Open Distro for Elasticsearch Security adds the following advanced features:

* Active Directory and LDAP Authentication/Authorisation
* Kerberos/SPNEGO Authentication/Authorisation
* JSON Web Token (JWT) Authentication/Authorisation
* Document- and field-level security
* Audit logging
* REST management API
* Kibana multi-tenancy module

## Documentation
Please refer to the official documentation for installation and configuration instructions:

## License
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

http://www.apache.org/licenses/LICENSE-2.0

or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.

## Legal
Open Distro For Elasticsearch Security Advanced Modules
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
THIRD-PARTY.txt (normal file, 116 lines added)
@@ -0,0 +1,116 @@

Lists of 114 third-party dependencies.
(The Apache Software License, Version 2.0) ZkClient (com.101tec:zkclient:0.10 - https://github.com/sgroschupf/zkclient)
(The Apache Software License, Version 2.0) HPPC Collections (com.carrotsearch:hppc:0.7.1 - http://labs.carrotsearch.com/hppc.html/hppc)
(MIT License) minimal-json (com.eclipsesource.minimal-json:minimal-json:0.9.4 - https://github.com/ralfstx/minimal-json)
(The Apache Software License, Version 2.0) Jackson-annotations (com.fasterxml.jackson.core:jackson-annotations:2.8.0 - http://github.com/FasterXML/jackson)
(The Apache Software License, Version 2.0) Jackson-core (com.fasterxml.jackson.core:jackson-core:2.8.10 - https://github.com/FasterXML/jackson-core)
(The Apache Software License, Version 2.0) jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.8.11 - http://github.com/FasterXML/jackson)
(The Apache Software License, Version 2.0) Jackson dataformat: CBOR (com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.8.10 - http://github.com/FasterXML/jackson-dataformats-binary)
(The Apache Software License, Version 2.0) Jackson dataformat: Smile (com.fasterxml.jackson.dataformat:jackson-dataformat-smile:2.8.10 - http://github.com/FasterXML/jackson-dataformats-binary)
(The Apache Software License, Version 2.0) Jackson-dataformat-YAML (com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.8.10 - https://github.com/FasterXML/jackson)
(The Apache License, Version 2.0) Woodstox (com.fasterxml.woodstox:woodstox-core:5.0.3 - https://github.com/FasterXML/woodstox)
(The Apache Software License, Version 2.0) zjsonpatch (com.flipkart.zjsonpatch:zjsonpatch:0.4.1 - https://github.com/flipkart-incubator/zjsonpatch/)
(Apache 2) (GNU Lesser General Public License) LDAPTIVE CORE (com.floragunn:ldaptive:1.1.0-fg-fork1 - http://www.ldaptive.org/ldaptive)
(The Apache Software License, Version 2.0) OpenDistro Security (com.amazon.opendistroforelasticsearch:opendistro-elasticsearch-security:0.0.7.0 - https://github.com/mauve-hedgehog/opendistro-elasticsearch-security)
(The Apache Software License, Version 2.0) OpenDistro Security SSL (com.amazon.opendistroforelasticsearch:opendistro-elasticsearch-security-ssl:0.0.7.0 - https://github.com/mauve-hedgehog/opendistro-elasticsearch-security-ssl)
(Apache License 2.0) compiler (com.github.spullara.mustache.java:compiler:0.9.3 - http://github.com/spullara/mustache.java)
(The Apache Software License, Version 2.0) json-flattener-java7 (com.github.wnameless:json-flattener-java7:0.4.1 - https://github.com/wnameless/json-flattener)
(The Apache Software License, Version 2.0) FindBugs-jsr305 (com.google.code.findbugs:jsr305:1.3.9 - http://findbugs.sourceforge.net/)
(Apache 2.0) error-prone annotations (com.google.errorprone:error_prone_annotations:2.0.18 - http://nexus.sonatype.org/oss-repository-hosting.html/error_prone_parent/error_prone_annotations)
(The Apache Software License, Version 2.0) Guava: Google Core Libraries for Java (com.google.guava:guava:23.0 - https://github.com/google/guava/guava)
(The Apache Software License, Version 2.0) J2ObjC Annotations (com.google.j2objc:j2objc-annotations:1.1 - https://github.com/google/j2objc/)
(The Apache Software License, Version 2.0) T-Digest (com.tdunning:t-digest:3.0 - https://github.com/tdunning/t-digest)
(Lesser General Public License (LGPL)) JTS Topology Suite (com.vividsolutions:jts:1.13 - http://sourceforge.net/projects/jts-topo-suite)
(Apache License 2.0) Metrics Core Library (com.yammer.metrics:metrics-core:2.2.0 - http://metrics.codahale.com/metrics-core/)
(Apache License, Version 2.0) Apache Commons CLI (commons-cli:commons-cli:1.3.1 - http://commons.apache.org/proper/commons-cli/)
(The Apache Software License, Version 2.0) Apache Commons Codec (commons-codec:commons-codec:1.9 - http://commons.apache.org/proper/commons-codec/)
(The Apache Software License, Version 2.0) Apache Commons Logging (commons-logging:commons-logging:1.2 - http://commons.apache.org/proper/commons-logging/)
(Apache License, Version 2.0) JSON Web Token support for the JVM (io.jsonwebtoken:jjwt:0.9.0 - http://nexus.sonatype.org/oss-repository-hosting.html/jjwt)
(Apache License, Version 2.0) Netty/Buffer (io.netty:netty-buffer:4.1.16.Final - http://netty.io/netty-buffer/)
(Apache License, Version 2.0) Netty/Codec (io.netty:netty-codec:4.1.16.Final - http://netty.io/netty-codec/)
(Apache License, Version 2.0) Netty/Codec/HTTP (io.netty:netty-codec-http:4.1.16.Final - http://netty.io/netty-codec-http/)
(Apache License, Version 2.0) Netty/Common (io.netty:netty-common:4.1.16.Final - http://netty.io/netty-common/)
(Apache License, Version 2.0) Netty/Handler (io.netty:netty-handler:4.1.16.Final - http://netty.io/netty-handler/)
(Apache License, Version 2.0) Netty/Resolver (io.netty:netty-resolver:4.1.16.Final - http://netty.io/netty-resolver/)
(Apache License, Version 2.0) Netty/Transport (io.netty:netty-transport:4.1.16.Final - http://netty.io/netty-transport/)
(Apache 2) Joda-Time (joda-time:joda-time:2.9.9 - http://www.joda.org/joda-time/)
(Eclipse Public License 1.0) JUnit (junit:junit:4.12 - http://junit.org)
(The Apache Software License, Version 2.0) Apache Log4j (log4j:log4j:1.2.16 - http://logging.apache.org/log4j/1.2/)
(The Apache Software License, Version 2.0) Byte Buddy (without dependencies) (net.bytebuddy:byte-buddy:1.7.9 - http://bytebuddy.net/byte-buddy)
(The Apache Software License, Version 2.0) Byte Buddy Java agent (net.bytebuddy:byte-buddy-agent:1.7.9 - http://bytebuddy.net/byte-buddy-agent)
(The MIT License) JOpt Simple (net.sf.jopt-simple:jopt-simple:5.0.2 - http://pholser.github.io/jopt-simple)
(Apache License, Version 2.0) Apache Commons Lang (org.apache.commons:commons-lang3:3.5 - http://commons.apache.org/proper/commons-lang/)
(Apache License, Version 2.0) Apache Commons Text (org.apache.commons:commons-text:1.1 - http://commons.apache.org/proper/commons-text/)
(The Apache Software License, Version 2.0) Apache CXF Core (org.apache.cxf:cxf-core:3.2.2 - http://cxf.apache.org)
(The Apache Software License, Version 2.0) Apache CXF JAX-RS Extensions: JSON Basic (org.apache.cxf:cxf-rt-rs-json-basic:3.2.2 - http://cxf.apache.org)
(The Apache Software License, Version 2.0) Apache CXF Runtime JOSE Core (org.apache.cxf:cxf-rt-rs-security-jose:3.2.2 - http://cxf.apache.org)
(The Apache Software License, Version 2.0) Apache CXF Runtime Security functionality (org.apache.cxf:cxf-rt-security:3.2.2 - http://cxf.apache.org)
(Apache License, Version 2.0) ApacheDS All (org.apache.directory.server:apacheds-all:2.0.0-M21 - http://directory.apache.org/apacheds/1.5/apacheds-all)
(Apache License, Version 2.0) Apache HttpClient Fluent API (org.apache.httpcomponents:fluent-hc:4.5.3 - http://hc.apache.org/httpcomponents-client)
(Apache License, Version 2.0) Apache HttpAsyncClient (org.apache.httpcomponents:httpasyncclient:4.1.2 - http://hc.apache.org/httpcomponents-asyncclient)
(Apache License, Version 2.0) Apache HttpClient (org.apache.httpcomponents:httpclient:4.5.3 - http://hc.apache.org/httpcomponents-client)
(Apache License, Version 2.0) Apache HttpClient Cache (org.apache.httpcomponents:httpclient-cache:4.5.3 - http://hc.apache.org/httpcomponents-client)
(Apache License, Version 2.0) Apache HttpCore (org.apache.httpcomponents:httpcore:4.4.6 - http://hc.apache.org/httpcomponents-core-ga)
(Apache License, Version 2.0) Apache HttpCore NIO (org.apache.httpcomponents:httpcore-nio:4.4.5 - http://hc.apache.org/httpcomponents-core-ga)
(The Apache Software License, Version 2.0) Apache Kafka (org.apache.kafka:kafka-clients:1.0.1 - http://kafka.apache.org)
(The Apache Software License, Version 2.0) Apache Kafka (org.apache.kafka:kafka_2.11:1.0.0 - http://kafka.apache.org)
(Apache License, Version 2.0) Apache Log4j API (org.apache.logging.log4j:log4j-api:2.9.1 - https://logging.apache.org/log4j/2.x/log4j-api/)
(Apache License, Version 2.0) Apache Log4j Core (org.apache.logging.log4j:log4j-core:2.9.1 - https://logging.apache.org/log4j/2.x/log4j-core/)
(Apache License, Version 2.0) Apache Log4j SLF4J Binding (org.apache.logging.log4j:log4j-slf4j-impl:2.9.1 - https://logging.apache.org/log4j/2.x/log4j-slf4j-impl/)
(Apache 2) Lucene Common Analyzers (org.apache.lucene:lucene-analyzers-common:7.2.1 - http://lucene.apache.org/lucene-parent/lucene-analyzers-common)
(Apache 2) Lucene Memory (org.apache.lucene:lucene-backward-codecs:7.2.1 - http://lucene.apache.org/lucene-parent/lucene-backward-codecs)
(Apache 2) Lucene Core (org.apache.lucene:lucene-core:7.2.1 - http://lucene.apache.org/lucene-parent/lucene-core)
(Apache 2) Lucene Grouping (org.apache.lucene:lucene-grouping:7.2.1 - http://lucene.apache.org/lucene-parent/lucene-grouping)
(Apache 2) Lucene Highlighter (org.apache.lucene:lucene-highlighter:7.2.1 - http://lucene.apache.org/lucene-parent/lucene-highlighter)
(Apache 2) Lucene Join (org.apache.lucene:lucene-join:7.2.1 - http://lucene.apache.org/lucene-parent/lucene-join)
(Apache 2) Lucene Memory (org.apache.lucene:lucene-memory:7.2.1 - http://lucene.apache.org/lucene-parent/lucene-memory)
(Apache 2) Lucene Miscellaneous (org.apache.lucene:lucene-misc:7.2.1 - http://lucene.apache.org/lucene-parent/lucene-misc)
(Apache 2) Lucene Queries (org.apache.lucene:lucene-queries:7.2.1 - http://lucene.apache.org/lucene-parent/lucene-queries)
(Apache 2) Lucene QueryParsers (org.apache.lucene:lucene-queryparser:7.2.1 - http://lucene.apache.org/lucene-parent/lucene-queryparser)
(Apache 2) Lucene Sandbox (org.apache.lucene:lucene-sandbox:7.2.1 - http://lucene.apache.org/lucene-parent/lucene-sandbox)
(Apache 2) Lucene Spatial (org.apache.lucene:lucene-spatial:7.2.1 - http://lucene.apache.org/lucene-parent/lucene-spatial)
(Apache 2) Lucene Spatial Extras (org.apache.lucene:lucene-spatial-extras:7.2.1 - http://lucene.apache.org/lucene-parent/lucene-spatial-extras)
(Apache 2) Lucene Spatial 3D (org.apache.lucene:lucene-spatial3d:7.2.1 - http://lucene.apache.org/lucene-parent/lucene-spatial3d)
(Apache 2) Lucene Suggest (org.apache.lucene:lucene-suggest:7.2.1 - http://lucene.apache.org/lucene-parent/lucene-suggest)
(The Apache Software License, Version 2.0) XmlSchema Core (org.apache.ws.xmlschema:xmlschema-core:2.2.3 - http://ws.apache.org/commons/xmlschema20/xmlschema-core/)
(The Apache Software License, Version 2.0) zookeeper (org.apache.zookeeper:zookeeper:3.4.10 - no url defined)
(Apache Software License, Version 1.1) (Bouncy Castle Licence) Bouncy Castle OpenPGP API (org.bouncycastle:bcpg-jdk15on:1.59 - http://www.bouncycastle.org/java.html)
(Bouncy Castle Licence) Bouncy Castle Provider (org.bouncycastle:bcprov-jdk15on:1.59 - http://www.bouncycastle.org/java.html)
(MIT license) Animal Sniffer Annotations (org.codehaus.mojo:animal-sniffer-annotations:1.14 - http://mojo.codehaus.org/animal-sniffer/animal-sniffer-annotations)
(The BSD License) Stax2 API (org.codehaus.woodstox:stax2-api:3.1.4 - http://wiki.fasterxml.com/WoodstoxStax2)
(The Apache Software License, Version 2.0) server (org.elasticsearch:elasticsearch:6.2.2 - https://github.com/elastic/elasticsearch)
(The Apache Software License, Version 2.0) cli (org.elasticsearch:elasticsearch-cli:6.2.2 - https://github.com/elastic/elasticsearch)
(The Apache Software License, Version 2.0) elasticsearch-core (org.elasticsearch:elasticsearch-core:6.2.2 - https://github.com/elastic/elasticsearch)
(The Apache Software License, Version 2.0) Elastic JNA Distribution (org.elasticsearch:jna:4.5.1 - https://github.com/java-native-access/jna)
(The Apache Software License, Version 2.0) Elasticsearch SecureSM (org.elasticsearch:securesm:1.2 - http://nexus.sonatype.org/oss-repository-hosting.html/securesm)
(The Apache Software License, Version 2.0) rest (org.elasticsearch.client:elasticsearch-rest-client:6.2.2 - https://github.com/elastic/elasticsearch)
(The Apache Software License, Version 2.0) rest-high-level (org.elasticsearch.client:elasticsearch-rest-high-level-client:6.2.2 - https://github.com/elastic/elasticsearch)
(The Apache Software License, Version 2.0) aggs-matrix-stats (org.elasticsearch.plugin:aggs-matrix-stats-client:6.2.2 - https://github.com/elastic/elasticsearch)
(The Apache Software License, Version 2.0) lang-mustache (org.elasticsearch.plugin:lang-mustache-client:6.2.2 - https://github.com/elastic/elasticsearch)
(The Apache Software License, Version 2.0) parent-join (org.elasticsearch.plugin:parent-join-client:6.2.2 - https://github.com/elastic/elasticsearch)
(The Apache Software License, Version 2.0) percolator (org.elasticsearch.plugin:percolator-client:6.2.2 - https://github.com/elastic/elasticsearch)
(The Apache Software License, Version 2.0) rank-eval (org.elasticsearch.plugin:rank-eval-client:6.2.2 - https://github.com/elastic/elasticsearch)
(The Apache Software License, Version 2.0) reindex (org.elasticsearch.plugin:reindex-client:6.2.2 - https://github.com/elastic/elasticsearch)
(The Apache Software License, Version 2.0) transport-netty4 (org.elasticsearch.plugin:transport-netty4-client:6.2.2 - https://github.com/elastic/elasticsearch)
(New BSD License) Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
(New BSD License) Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
(Public Domain, per Creative Commons CC0) HdrHistogram (org.hdrhistogram:HdrHistogram:2.1.9 - http://hdrhistogram.github.io/HdrHistogram/)
(The Apache Software License, Version 2.0) Spatial4J (org.locationtech.spatial4j:spatial4j:0.6 - http://www.locationtech.org/projects/locationtech.spatial4j)
(The Apache Software License, Version 2.0) LZ4 and xxHash (org.lz4:lz4-java:1.4 - https://github.com/lz4/lz4-java)
(The MIT License) Mockito (org.mockito:mockito-all:1.10.19 - http://www.mockito.org)
(The MIT License) mockito-core (org.mockito:mockito-core:2.15.0 - https://github.com/mockito/mockito)
(Apache 2) Objenesis (org.objenesis:objenesis:2.6 - http://objenesis.org)
(BSD 3-Clause) Scala Library (org.scala-lang:scala-library:2.11.11 - http://www.scala-lang.org/)
(MIT License) SLF4J API Module (org.slf4j:slf4j-api:1.7.25 - http://www.slf4j.org)
(Apache License, Version 2.0) Spring AOP (org.springframework:spring-aop:5.0.4.RELEASE - https://github.com/spring-projects/spring-framework)
(Apache License, Version 2.0) Spring Beans (org.springframework:spring-beans:5.0.4.RELEASE - https://github.com/spring-projects/spring-framework)
(Apache License, Version 2.0) Spring Context (org.springframework:spring-context:5.0.4.RELEASE - https://github.com/spring-projects/spring-framework)
(Apache License, Version 2.0) Spring Core (org.springframework:spring-core:5.0.4.RELEASE - https://github.com/spring-projects/spring-framework)
(Apache License, Version 2.0) Spring Expression Language (SpEL) (org.springframework:spring-expression:5.0.4.RELEASE - https://github.com/spring-projects/spring-framework)
(Apache License, Version 2.0) Spring Commons Logging Bridge (org.springframework:spring-jcl:5.0.4.RELEASE - https://github.com/spring-projects/spring-framework)
(Apache License, Version 2.0) Spring TestContext Framework (org.springframework:spring-test:5.0.4.RELEASE - https://github.com/spring-projects/spring-framework)
(The Apache Software License, Version 2.0) Spring Kafka Test Support (org.springframework.kafka:spring-kafka-test:2.1.4.RELEASE - https://github.com/spring-projects/spring-kafka)
(Apache 2.0) Spring Retry (org.springframework.retry:spring-retry:1.2.2.RELEASE - http://www.springsource.org)
(The Apache Software License, Version 2.0) snappy-java (org.xerial.snappy:snappy-java:1.1.4 - https://github.com/xerial/snappy-java)
(Apache License, Version 2.0) SnakeYAML (org.yaml:snakeyaml:1.17 - http://www.snakeyaml.org)
pom.xml (executable file, 389 lines added)
@@ -0,0 +1,389 @@
<?xml version="1.0" encoding="UTF-8"?>

<!--
  ~ Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
  ~
  ~ Licensed under the Apache License, Version 2.0 (the "License").
  ~ You may not use this file except in compliance with the License.
  ~ A copy of the License is located at
  ~
  ~ http://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ or in the "license" file accompanying this file. This file is distributed
  ~ on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
  ~ express or implied. See the License for the specific language governing
  ~ permissions and limitations under the License.
  -->

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <parent>
    <groupId>com.amazon.opendistroforelasticsearch</groupId>
    <artifactId>opendistro_security_parent</artifactId>
    <version>0.7.0.0</version>
  </parent>

  <artifactId>opendistro_security_advanced_modules</artifactId>
  <version>0.7.0.0</version>
  <packaging>jar</packaging>

  <name>Open Distro Security Advanced Modules for Elasticsearch</name>
  <description>Advanced Modules for Elasticsearch Security</description>
  <url>https://github.com/mauve-hedgehog/opendistro-elasticsearch-security-enterprise</url>
  <inceptionYear>2016</inceptionYear>

  <properties>
    <security.version>0.7.0.0</security.version>
    <elasticsearch.version>6.5.4</elasticsearch.version>

    <!-- deps -->
    <log4j.version>2.11.1</log4j.version>
    <jjwt.version>0.10.5</jjwt.version>
    <ldaptive.version>1.2.3</ldaptive.version>
    <jackson-databind.version>2.8.11.1</jackson-databind.version>
    <http.commons.version>4.5.3</http.commons.version>
    <cxf.version>3.2.2</cxf.version>
    <guava.version>25.1-jre</guava.version>

    <!-- Test only -->
    <mockito.version>2.21.0</mockito.version>
    <unboundid-ldapsdk.version>4.0.9</unboundid-ldapsdk.version>
  </properties>

  <scm>
    <url>https://github.com/mauve-hedgehog/opendistro-elasticsearch-security-enterprise-modules</url>
    <connection>scm:git:git@github.com:mauve-hedgehog/opendistro-elasticsearch-security-advanced-modules.git</connection>
    <developerConnection>scm:git:git@github.com:mauve-hedgehog/opendistro-elasticsearch-security-advanced-modules.git</developerConnection>
    <tag>v0.7.0.0</tag>
  </scm>

  <issueManagement>
    <system>GitHub</system>
    <url>https://github.com/mauve-hedgehog/opendistro-elasticsearch-security-enterprise-modules/issues</url>
  </issueManagement>

  <contributors />

  <dependencies>
    <dependency>
      <groupId>com.amazon.opendistroforelasticsearch</groupId>
      <artifactId>opendistro_security</artifactId>
      <version>${security.version}</version>
      <scope>provided</scope>
    </dependency>

    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-core</artifactId>
      <version>${log4j.version}</version>
      <scope>provided</scope>
    </dependency>

    <dependency>
      <groupId>org.apache.logging.log4j</groupId>
      <artifactId>log4j-slf4j-impl</artifactId>
      <version>${log4j.version}</version>
      <exclusions>
        <exclusion>
          <groupId>org.apache.logging.log4j</groupId>
          <artifactId>log4j-api</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>org.elasticsearch</groupId>
      <artifactId>elasticsearch</artifactId>
      <version>${elasticsearch.version}</version>
      <scope>provided</scope>
    </dependency>

    <dependency>
      <groupId>org.ldaptive</groupId>
      <artifactId>ldaptive</artifactId>
      <version>${ldaptive.version}</version>
      <exclusions>
        <exclusion>
          <artifactId>commons-cli</artifactId>
          <groupId>commons-cli</groupId>
        </exclusion>
        <exclusion>
          <artifactId>slf4j-api</artifactId>
          <groupId>org.slf4j</groupId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>org.apache.httpcomponents</groupId>
      <artifactId>fluent-hc</artifactId>
      <version>${http.commons.version}</version>
    </dependency>

    <dependency>
      <groupId>org.apache.httpcomponents</groupId>
      <artifactId>httpclient-cache</artifactId>
      <version>${http.commons.version}</version>
    </dependency>


    <dependency>
      <groupId>org.elasticsearch.client</groupId>
      <artifactId>elasticsearch-rest-high-level-client</artifactId>
      <version>${elasticsearch.version}</version>
      <exclusions>
        <exclusion>
          <artifactId>elasticsearch</artifactId>
          <groupId>org.elasticsearch</groupId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>io.jsonwebtoken</groupId>
      <artifactId>jjwt-api</artifactId>
      <version>${jjwt.version}</version>
    </dependency>

    <dependency>
      <groupId>io.jsonwebtoken</groupId>
      <artifactId>jjwt-impl</artifactId>
      <version>${jjwt.version}</version>
      <scope>runtime</scope>
    </dependency>

    <dependency>
      <groupId>io.jsonwebtoken</groupId>
      <artifactId>jjwt-jackson</artifactId>
      <version>${jjwt.version}</version>
      <scope>runtime</scope>
      <exclusions>
        <exclusion>
          <artifactId>jackson-databind</artifactId>
          <groupId>com.fasterxml.jackson.core</groupId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>org.apache.cxf</groupId>
      <artifactId>cxf-rt-rs-security-jose</artifactId>
      <version>${cxf.version}</version>
    </dependency>

    <dependency>
      <groupId>com.fasterxml.jackson.core</groupId>
      <artifactId>jackson-databind</artifactId>
      <version>${jackson-databind.version}</version>
      <exclusions>
        <exclusion>
          <artifactId>jackson-core</artifactId>
          <groupId>com.fasterxml.jackson.core</groupId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>com.github.wnameless</groupId>
      <artifactId>json-flattener</artifactId>
      <version>0.5.0</version>
    </dependency>

    <dependency>
      <groupId>com.flipkart.zjsonpatch</groupId>
      <artifactId>zjsonpatch</artifactId>
      <version>0.4.4</version>
      <exclusions>
        <exclusion>
          <artifactId>jackson-core</artifactId>
          <groupId>com.fasterxml.jackson.core</groupId>
        </exclusion>
        <exclusion>
          <artifactId>jackson-databind</artifactId>
          <groupId>com.fasterxml.jackson.core</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka-clients</artifactId>
      <version>1.0.1</version>
      <exclusions>
        <exclusion>
          <artifactId>slf4j-api</artifactId>
          <groupId>org.slf4j</groupId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>com.onelogin</groupId>
      <artifactId>java-saml</artifactId>
      <version>2.3.0</version>

      <exclusions>
        <exclusion>
          <groupId>org.codehaus.woodstox</groupId>
          <artifactId>woodstox-core-asl</artifactId>
        </exclusion>
        <exclusion>
          <groupId>joda-time</groupId>
          <artifactId>joda-time</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>org.opensaml</groupId>
      <artifactId>opensaml-saml-impl</artifactId>
      <version>3.3.0</version>
      <exclusions>
        <exclusion>
          <groupId>joda-time</groupId>
          <artifactId>joda-time</artifactId>
        </exclusion>
        <exclusion>
          <groupId>commons-collections</groupId>
          <artifactId>commons-collections</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>commons-collections</groupId>
      <artifactId>commons-collections</artifactId>
      <version>3.2.2</version>
    </dependency>


    <dependency>
      <groupId>com.google.guava</groupId>
      <artifactId>guava</artifactId>
      <version>${guava.version}</version>
    </dependency>

    <dependency>
      <groupId>com.jayway.jsonpath</groupId>
      <artifactId>json-path</artifactId>
      <version>2.4.0</version>
    </dependency>

    <!-- Only test scoped dependencies hereafter -->

    <dependency>
      <groupId>com.amazon.opendistroforelasticsearch</groupId>
      <artifactId>opendistro_security</artifactId>
      <version>${security.version}</version>
      <classifier>tests</classifier>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>org.springframework.kafka</groupId>
      <artifactId>spring-kafka-test</artifactId>
      <version>2.1.4.RELEASE</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka-clients</artifactId>
      <version>1.0.1</version>
      <scope>test</scope>
      <classifier>test</classifier>
    </dependency>

    <dependency>
      <groupId>org.hamcrest</groupId>
      <artifactId>hamcrest-all</artifactId>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>org.elasticsearch.plugin</groupId>
      <artifactId>reindex-client</artifactId>
      <version>${elasticsearch.version}</version>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>org.elasticsearch.plugin</groupId>
      <artifactId>percolator-client</artifactId>
      <version>${elasticsearch.version}</version>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-core</artifactId>
      <version>${mockito.version}</version>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>javax.servlet</groupId>
      <artifactId>servlet-api</artifactId>
      <version>2.5</version>
      <scope>test</scope>
    </dependency>

    <!-- https://mvnrepository.com/artifact/com.unboundid/unboundid-ldapsdk -->
    <dependency>
      <groupId>com.unboundid</groupId>
      <artifactId>unboundid-ldapsdk</artifactId>
      <version>${unboundid-ldapsdk.version}</version>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>commons-io</groupId>
      <artifactId>commons-io</artifactId>
      <scope>test</scope>
    </dependency>

  </dependencies>
  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-jar-plugin</artifactId>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-compiler-plugin</artifactId>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
        <version>3.0.0-M2</version>
        <configuration>
          <rerunFailingTestsCount>3</rerunFailingTestsCount>
          <forkCount>3</forkCount>
          <reuseForks>true</reuseForks>
          <!-- <parallel>methods</parallel> <threadCount>1</threadCount> -->
          <systemPropertyVariables>
            <forkno>fork_${surefire.forkNumber}</forkno>
          </systemPropertyVariables>

          <includes>
            <include>**/*.java</include>
          </includes>
        </configuration>

      </plugin>
    </plugins>
    <extensions>
      <extension>
        <groupId>com.gkatzioura.maven.cloud</groupId>
        <artifactId>s3-storage-wagon</artifactId>
        <version>1.8</version>
      </extension>
    </extensions>
  </build>
</project>
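The surefire configuration above runs tests in three forks and passes each fork a "forkno" system property of the form "fork_${surefire.forkNumber}". A minimal sketch of how a test helper could use that property to keep per-fork resources from colliding; the class name and the "target/test-tmp" path below are hypothetical and not part of this commit:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

// Hypothetical helper (not part of this commit): derives a per-fork scratch
// directory from the "forkno" system property set by the surefire plugin,
// so tests running in parallel forks do not share the same paths.
public final class ForkScopedPaths {

    private ForkScopedPaths() {
    }

    public static Path perForkTempDir() throws IOException {
        // Falls back to "fork_0" when run outside of surefire (e.g. from an IDE).
        final String fork = System.getProperty("forkno", "fork_0");
        return Files.createDirectories(Paths.get("target", "test-tmp", fork));
    }

    public static void main(String[] args) throws IOException {
        System.out.println("Using scratch dir: " + perForkTempDir());
    }
}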
AbstractHTTPJwtAuthenticator.java (new file, 227 lines)
@@ -0,0 +1,227 @@
/*
 * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package com.amazon.dlic.auth.http.jwt;

import java.nio.file.Path;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Collection;
import java.util.Map.Entry;

import org.apache.cxf.rs.security.jose.jwt.JwtClaims;
import org.apache.cxf.rs.security.jose.jwt.JwtToken;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.RestStatus;

import com.amazon.dlic.auth.http.jwt.keybyoidc.AuthenticatorUnavailableException;
import com.amazon.dlic.auth.http.jwt.keybyoidc.BadCredentialsException;
import com.amazon.dlic.auth.http.jwt.keybyoidc.JwtVerifier;
import com.amazon.dlic.auth.http.jwt.keybyoidc.KeyProvider;
import com.amazon.opendistroforelasticsearch.security.auth.HTTPAuthenticator;
import com.amazon.opendistroforelasticsearch.security.user.AuthCredentials;

public abstract class AbstractHTTPJwtAuthenticator implements HTTPAuthenticator {
    private final static Logger log = LogManager.getLogger(AbstractHTTPJwtAuthenticator.class);

    private static final String BEARER = "bearer ";

    private KeyProvider keyProvider;
    private JwtVerifier jwtVerifier;
    private final String jwtHeaderName;
    private final String jwtUrlParameter;
    private final String subjectKey;
    private final String rolesKey;

    public AbstractHTTPJwtAuthenticator(Settings settings, Path configPath) {
        jwtUrlParameter = settings.get("jwt_url_parameter");
        jwtHeaderName = settings.get("jwt_header", "Authorization");
        rolesKey = settings.get("roles_key");
        subjectKey = settings.get("subject_key");

        try {
            this.keyProvider = this.initKeyProvider(settings, configPath);

            jwtVerifier = new JwtVerifier(keyProvider);

        } catch (Exception e) {
            log.error("Error creating JWT authenticator: " + e + ". JWT authentication will not work", e);
        }
    }

    @Override
    public AuthCredentials extractCredentials(RestRequest request, ThreadContext context)
            throws ElasticsearchSecurityException {
        final SecurityManager sm = System.getSecurityManager();

        if (sm != null) {
            sm.checkPermission(new SpecialPermission());
        }

        AuthCredentials creds = AccessController.doPrivileged(new PrivilegedAction<AuthCredentials>() {
            @Override
            public AuthCredentials run() {
                return extractCredentials0(request);
            }
        });

        return creds;
    }

    private AuthCredentials extractCredentials0(final RestRequest request) throws ElasticsearchSecurityException {

        String jwtString = getJwtTokenString(request);

        if (Strings.isNullOrEmpty(jwtString)) {
            return null;
        }

        JwtToken jwt;

        try {
            jwt = jwtVerifier.getVerifiedJwtToken(jwtString);
        } catch (AuthenticatorUnavailableException e) {
            log.info(e);
            throw new ElasticsearchSecurityException(e.getMessage(), RestStatus.SERVICE_UNAVAILABLE);
        } catch (BadCredentialsException e) {
            log.info("Extracting JWT token from " + jwtString + " failed", e);
            return null;
        }

        JwtClaims claims = jwt.getClaims();

        final String subject = extractSubject(claims);

        if (subject == null) {
            log.error("No subject found in JWT token");
            return null;
        }

        final String[] roles = extractRoles(claims);

        final AuthCredentials ac = new AuthCredentials(subject, roles).markComplete();

        for (Entry<String, Object> claim : claims.asMap().entrySet()) {
            ac.addAttribute("attr.jwt." + claim.getKey(), String.valueOf(claim.getValue()));
        }

        return ac;

    }

    protected String getJwtTokenString(RestRequest request) {
        String jwtToken = request.header(jwtHeaderName);

        if (jwtUrlParameter != null) {
            if (jwtToken == null || jwtToken.isEmpty()) {
                jwtToken = request.param(jwtUrlParameter);
            } else {
                // just consume to avoid "contains unrecognized parameter"
                request.param(jwtUrlParameter);
            }
        }

        if (jwtToken == null) {
            return null;
        }

        int index;

        if ((index = jwtToken.toLowerCase().indexOf(BEARER)) > -1) { // detect Bearer
            jwtToken = jwtToken.substring(index + BEARER.length());
        }

        return jwtToken;
    }

    protected String extractSubject(JwtClaims claims) {
        String subject = claims.getSubject();

        if (subjectKey != null) {
            Object subjectObject = claims.getClaim(subjectKey);

            if (subjectObject == null) {
                log.warn("Failed to get subject from JWT claims, check if subject_key '{}' is correct.", subjectKey);
                return null;
            }

            // We expect a String. If we find something else, convert to String but issue a
            // warning
            if (!(subjectObject instanceof String)) {
                log.warn(
                        "Expected type String for roles in the JWT for subject_key {}, but value was '{}' ({}). Will convert this value to String.",
                        subjectKey, subjectObject, subjectObject.getClass());
                subject = String.valueOf(subjectObject);
            } else {
                subject = (String) subjectObject;
            }
        }
        return subject;
    }

    @SuppressWarnings("unchecked")
    protected String[] extractRoles(JwtClaims claims) {
        if (rolesKey == null) {
            return new String[0];
        }

        Object rolesObject = claims.getClaim(rolesKey);

        if (rolesObject == null) {
            log.warn(
                    "Failed to get roles from JWT claims with roles_key '{}'. Check if this key is correct and available in the JWT payload.",
                    rolesKey);
            return new String[0];
        }

        String[] roles = String.valueOf(rolesObject).split(",");

        // We expect a String or Collection. If we find something else, convert to
        // String but issue a warning
        if (!(rolesObject instanceof String) && !(rolesObject instanceof Collection<?>)) {
            log.warn(
                    "Expected type String or Collection for roles in the JWT for roles_key {}, but value was '{}' ({}). Will convert this value to String.",
                    rolesKey, rolesObject, rolesObject.getClass());
        } else if (rolesObject instanceof Collection<?>) {
            roles = ((Collection<String>) rolesObject).toArray(new String[0]);
        }

        for (int i = 0; i < roles.length; i++) {
            roles[i] = roles[i].trim();
        }

        return roles;
    }

    protected abstract KeyProvider initKeyProvider(Settings settings, Path configPath) throws Exception;

    @Override
    public boolean reRequestAuthentication(RestChannel channel, AuthCredentials authCredentials) {
        final BytesRestResponse wwwAuthenticateResponse = new BytesRestResponse(RestStatus.UNAUTHORIZED, "");
        wwwAuthenticateResponse.addHeader("WWW-Authenticate", "Bearer realm=\"Open Distro Security\"");
        channel.sendResponse(wwwAuthenticateResponse);
        return true;
    }

}
@ -0,0 +1,255 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt;
|
||||
|
||||
import io.jsonwebtoken.Claims;
|
||||
import io.jsonwebtoken.JwtParser;
|
||||
import io.jsonwebtoken.Jwts;
|
||||
import io.jsonwebtoken.io.Decoders;
|
||||
import io.jsonwebtoken.security.Keys;
|
||||
import io.jsonwebtoken.security.WeakKeyException;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.security.AccessController;
|
||||
import java.security.Key;
|
||||
import java.security.KeyFactory;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.security.PrivilegedAction;
|
||||
import java.security.PublicKey;
|
||||
import java.security.spec.InvalidKeySpecException;
|
||||
import java.security.spec.X509EncodedKeySpec;
|
||||
import java.util.Collection;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.ElasticsearchSecurityException;
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.rest.BytesRestResponse;
|
||||
import org.elasticsearch.rest.RestChannel;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auth.HTTPAuthenticator;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.AuthCredentials;
|
||||
|
||||
public class HTTPJwtAuthenticator implements HTTPAuthenticator {
|
||||
|
||||
protected final Logger log = LogManager.getLogger(this.getClass());
|
||||
|
||||
private static final String BEARER = "bearer ";
|
||||
private final JwtParser jwtParser;
|
||||
private final String jwtHeaderName;
|
||||
private final String jwtUrlParameter;
|
||||
private final String rolesKey;
|
||||
private final String subjectKey;
|
||||
|
||||
public HTTPJwtAuthenticator(final Settings settings, final Path configPath) {
|
||||
super();
|
||||
|
||||
JwtParser _jwtParser = null;
|
||||
|
||||
try {
|
||||
String signingKey = settings.get("signing_key");
|
||||
|
||||
if(signingKey == null || signingKey.length() == 0) {
|
||||
log.error("signingKey must not be null or empty. JWT authentication will not work");
|
||||
} else {
|
||||
|
||||
signingKey = signingKey.replace("-----BEGIN PUBLIC KEY-----\n", "");
|
||||
signingKey = signingKey.replace("-----END PUBLIC KEY-----", "");
|
||||
|
||||
byte[] decoded = Decoders.BASE64.decode(signingKey);
|
||||
Key key = null;
|
||||
|
||||
try {
|
||||
key = getPublicKey(decoded, "RSA");
|
||||
} catch (Exception e) {
|
||||
log.debug("No public RSA key, try other algos ({})", e.toString());
|
||||
}
|
||||
|
||||
try {
|
||||
key = getPublicKey(decoded, "EC");
|
||||
} catch (Exception e) {
|
||||
log.debug("No public ECDSA key, try other algos ({})", e.toString());
|
||||
}
|
||||
|
||||
if(key != null) {
|
||||
_jwtParser = Jwts.parser().setSigningKey(key);
|
||||
} else {
|
||||
_jwtParser = Jwts.parser().setSigningKey(decoded);
|
||||
}
|
||||
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
log.error("Error creating JWT authenticator: "+e+". JWT authentication will not work", e);
|
||||
}
|
||||
|
||||
jwtUrlParameter = settings.get("jwt_url_parameter");
|
||||
jwtHeaderName = settings.get("jwt_header","Authorization");
|
||||
rolesKey = settings.get("roles_key");
|
||||
subjectKey = settings.get("subject_key");
|
||||
jwtParser = _jwtParser;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public AuthCredentials extractCredentials(RestRequest request, ThreadContext context) throws ElasticsearchSecurityException {
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
AuthCredentials creds = AccessController.doPrivileged(new PrivilegedAction<AuthCredentials>() {
|
||||
@Override
|
||||
public AuthCredentials run() {
|
||||
return extractCredentials0(request);
|
||||
}
|
||||
});
|
||||
|
||||
return creds;
|
||||
}
|
||||
|
||||
private AuthCredentials extractCredentials0(final RestRequest request) {
|
||||
if (jwtParser == null) {
|
||||
log.error("Missing Signing Key. JWT authentication will not work");
|
||||
return null;
|
||||
}
|
||||
|
||||
String jwtToken = request.header(jwtHeaderName);
|
||||
|
||||
if((jwtToken == null || jwtToken.isEmpty()) && jwtUrlParameter != null) {
|
||||
jwtToken = request.param(jwtUrlParameter);
|
||||
} else if (jwtUrlParameter != null) {
|
||||
//just consume to avoid "contains unrecognized parameter"
|
||||
request.param(jwtUrlParameter);
|
||||
}
|
||||
|
||||
if (jwtToken == null || jwtToken.length() == 0) {
|
||||
if(log.isDebugEnabled()) {
|
||||
log.debug("No JWT token found in '{}' {} header", jwtUrlParameter==null?jwtHeaderName:jwtUrlParameter, jwtUrlParameter==null?"header":"url parameter");
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
final int index;
|
||||
if((index = jwtToken.toLowerCase().indexOf(BEARER)) > -1) { //detect Bearer
|
||||
jwtToken = jwtToken.substring(index+BEARER.length());
|
||||
} else {
|
||||
log.warn("No Bearer scheme found in header");
|
||||
}
|
||||
|
||||
try {
|
||||
final Claims claims = jwtParser.parseClaimsJws(jwtToken).getBody();
|
||||
|
||||
final String subject = extractSubject(claims, request);
|
||||
|
||||
if (subject == null) {
|
||||
log.error("No subject found in JWT token");
|
||||
return null;
|
||||
}
|
||||
|
||||
final String[] roles = extractRoles(claims, request);
|
||||
|
||||
final AuthCredentials ac = new AuthCredentials(subject, roles).markComplete();
|
||||
|
||||
for(Entry<String, Object> claim: claims.entrySet()) {
|
||||
ac.addAttribute("attr.jwt."+claim.getKey(), String.valueOf(claim.getValue()));
|
||||
}
|
||||
|
||||
return ac;
|
||||
|
||||
} catch (WeakKeyException e) {
|
||||
log.error("Cannot authenticate user with JWT because of "+e, e);
|
||||
return null;
|
||||
} catch (Exception e) {
|
||||
if(log.isDebugEnabled()) {
|
||||
log.debug("Invalid or expired JWT token.", e);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean reRequestAuthentication(final RestChannel channel, AuthCredentials creds) {
|
||||
final BytesRestResponse wwwAuthenticateResponse = new BytesRestResponse(RestStatus.UNAUTHORIZED,"");
|
||||
wwwAuthenticateResponse.addHeader("WWW-Authenticate", "Bearer realm=\"Open Distro Security\"");
|
||||
channel.sendResponse(wwwAuthenticateResponse);
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getType() {
|
||||
return "jwt";
|
||||
}
|
||||
|
||||
protected String extractSubject(final Claims claims, final RestRequest request) {
|
||||
String subject = claims.getSubject();
|
||||
if(subjectKey != null) {
|
||||
// try to get roles from claims, first as Object to avoid having to catch the ExpectedTypeException
|
||||
Object subjectObject = claims.get(subjectKey, Object.class);
|
||||
if(subjectObject == null) {
|
||||
log.warn("Failed to get subject from JWT claims, check if subject_key '{}' is correct.", subjectKey);
|
||||
return null;
|
||||
}
|
||||
// We expect a String. If we find something else, convert to String but issue a warning
|
||||
if(!(subjectObject instanceof String)) {
|
||||
log.warn("Expected type String for roles in the JWT for subject_key {}, but value was '{}' ({}). Will convert this value to String.", subjectKey, subjectObject, subjectObject.getClass());
|
||||
}
|
||||
subject = String.valueOf(subjectObject);
|
||||
}
|
||||
return subject;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
protected String[] extractRoles(final Claims claims, final RestRequest request) {
|
||||
// no roles key specified
|
||||
if(rolesKey == null) {
|
||||
return new String[0];
|
||||
}
|
||||
// try to get roles from claims, first as Object to avoid having to catch the ExpectedTypeException
|
||||
final Object rolesObject = claims.get(rolesKey, Object.class);
|
||||
if(rolesObject == null) {
|
||||
log.warn("Failed to get roles from JWT claims with roles_key '{}'. Check if this key is correct and available in the JWT payload.", rolesKey);
|
||||
return new String[0];
|
||||
}
|
||||
|
||||
String[] roles = String.valueOf(rolesObject).split(",");
|
||||
|
||||
// We expect a String or Collection. If we find something else, convert to String but issue a warning
|
||||
if (!(rolesObject instanceof String) && !(rolesObject instanceof Collection<?>)) {
|
||||
log.warn("Expected type String or Collection for roles in the JWT for roles_key {}, but value was '{}' ({}). Will convert this value to String.", rolesKey, rolesObject, rolesObject.getClass());
|
||||
} else if (rolesObject instanceof Collection<?>) {
|
||||
roles = ((Collection<String>) rolesObject).toArray(new String[0]);
|
||||
}
|
||||
|
||||
for (int i = 0; i < roles.length; i++) {
|
||||
roles[i] = roles[i].trim();
|
||||
}
|
||||
|
||||
return roles;
|
||||
}
|
||||
|
||||
private static PublicKey getPublicKey(final byte[] keyBytes, final String algo) throws NoSuchAlgorithmException, InvalidKeySpecException {
|
||||
X509EncodedKeySpec spec = new X509EncodedKeySpec(keyBytes);
|
||||
KeyFactory kf = KeyFactory.getInstance(algo);
|
||||
return kf.generatePublic(spec);
|
||||
}
|
||||
|
||||
}
|
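For orientation, the HTTPJwtAuthenticator constructor above only reads a handful of settings keys. Below is a minimal sketch of assembling such a Settings object programmatically; the signing_key value is a placeholder rather than working key material, and configPath may be null here because this constructor does not use it.

// Sketch only: the signing_key value is a placeholder; supply a base64-encoded
// public key (RSA/EC) or a shared HMAC secret in a real configuration.
Settings settings = Settings.builder()
        .put("signing_key", "base64-encoded-public-key-or-secret")
        .put("jwt_header", "Authorization")        // default when absent
        .put("jwt_url_parameter", "access_token")  // optional; only read if the header is empty
        .put("roles_key", "roles")
        .put("subject_key", "sub")
        .build();

HTTPJwtAuthenticator authenticator = new HTTPJwtAuthenticator(settings, null);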
@ -0,0 +1,42 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
public class AuthenticatorUnavailableException extends RuntimeException {
|
||||
private static final long serialVersionUID = -7007025852090301416L;
|
||||
|
||||
public AuthenticatorUnavailableException() {
|
||||
super();
|
||||
}
|
||||
|
||||
public AuthenticatorUnavailableException(String message, Throwable cause, boolean enableSuppression,
|
||||
boolean writableStackTrace) {
|
||||
super(message, cause, enableSuppression, writableStackTrace);
|
||||
}
|
||||
|
||||
public AuthenticatorUnavailableException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
|
||||
public AuthenticatorUnavailableException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
public AuthenticatorUnavailableException(Throwable cause) {
|
||||
super(cause);
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,42 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
public class BadCredentialsException extends Exception {
|
||||
|
||||
private static final long serialVersionUID = 9092575587366580869L;
|
||||
|
||||
public BadCredentialsException() {
|
||||
super();
|
||||
}
|
||||
|
||||
public BadCredentialsException(String message, Throwable cause, boolean enableSuppression,
|
||||
boolean writableStackTrace) {
|
||||
super(message, cause, enableSuppression, writableStackTrace);
|
||||
}
|
||||
|
||||
public BadCredentialsException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
|
||||
public BadCredentialsException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
public BadCredentialsException(Throwable cause) {
|
||||
super(cause);
|
||||
}
|
||||
}
|
@ -0,0 +1,67 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
import java.nio.file.Path;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import com.amazon.dlic.auth.http.jwt.AbstractHTTPJwtAuthenticator;
|
||||
import com.amazon.dlic.util.SettingsBasedSSLConfigurator;
|
||||
|
||||
public class HTTPJwtKeyByOpenIdConnectAuthenticator extends AbstractHTTPJwtAuthenticator {
|
||||
|
||||
private final static Logger log = LogManager.getLogger(HTTPJwtKeyByOpenIdConnectAuthenticator.class);
|
||||
|
||||
public HTTPJwtKeyByOpenIdConnectAuthenticator(Settings settings, Path configPath) {
|
||||
super(settings, configPath);
|
||||
}
|
||||
|
||||
protected KeyProvider initKeyProvider(Settings settings, Path configPath) throws Exception {
|
||||
int idpRequestTimeoutMs = settings.getAsInt("idp_request_timeout_ms", 5000);
|
||||
int idpQueuedThreadTimeoutMs = settings.getAsInt("idp_queued_thread_timeout_ms", 2500);
|
||||
|
||||
int refreshRateLimitTimeWindowMs = settings.getAsInt("refresh_rate_limit_time_window_ms", 10000);
|
||||
int refreshRateLimitCount = settings.getAsInt("refresh_rate_limit_count", 10);
|
||||
|
||||
KeySetRetriever keySetRetriever = new KeySetRetriever(settings.get("openid_connect_url"),
|
||||
getSSLConfig(settings, configPath), settings.getAsBoolean("cache_jwks_endpoint", false));
|
||||
|
||||
keySetRetriever.setRequestTimeoutMs(idpRequestTimeoutMs);
|
||||
|
||||
SelfRefreshingKeySet selfRefreshingKeySet = new SelfRefreshingKeySet(keySetRetriever);
|
||||
|
||||
selfRefreshingKeySet.setRequestTimeoutMs(idpRequestTimeoutMs);
|
||||
selfRefreshingKeySet.setQueuedThreadTimeoutMs(idpQueuedThreadTimeoutMs);
|
||||
selfRefreshingKeySet.setRefreshRateLimitTimeWindowMs(refreshRateLimitTimeWindowMs);
|
||||
selfRefreshingKeySet.setRefreshRateLimitCount(refreshRateLimitCount);
|
||||
|
||||
return selfRefreshingKeySet;
|
||||
}
|
||||
|
||||
private static SettingsBasedSSLConfigurator.SSLConfig getSSLConfig(Settings settings, Path configPath)
|
||||
throws Exception {
|
||||
return new SettingsBasedSSLConfigurator(settings, configPath, "openid_connect_idp").buildSSLConfig();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getType() {
|
||||
return "jwt-key-by-oidc";
|
||||
}
|
||||
|
||||
}
|
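The OpenID Connect variant above is configured through the settings read in initKeyProvider. A hedged sketch of those keys follows; the discovery URL is a placeholder, and the numeric and boolean values shown are simply the defaults the code falls back to.

// Sketch only: the openid_connect_url value is a placeholder for your IdP's discovery endpoint.
Settings settings = Settings.builder()
        .put("openid_connect_url", "https://idp.example.com/.well-known/openid-configuration")
        .put("idp_request_timeout_ms", 5000)
        .put("idp_queued_thread_timeout_ms", 2500)
        .put("refresh_rate_limit_time_window_ms", 10000)
        .put("refresh_rate_limit_count", 10)
        .put("cache_jwks_endpoint", false)
        .build();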
@ -0,0 +1,83 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
import org.apache.cxf.rs.security.jose.jwk.JsonWebKey;
|
||||
import org.apache.cxf.rs.security.jose.jws.JwsJwtCompactConsumer;
|
||||
import org.apache.cxf.rs.security.jose.jws.JwsSignatureVerifier;
|
||||
import org.apache.cxf.rs.security.jose.jws.JwsUtils;
|
||||
import org.apache.cxf.rs.security.jose.jwt.JwtClaims;
|
||||
import org.apache.cxf.rs.security.jose.jwt.JwtException;
|
||||
import org.apache.cxf.rs.security.jose.jwt.JwtToken;
|
||||
import org.apache.cxf.rs.security.jose.jwt.JwtUtils;
|
||||
|
||||
import com.google.common.base.Strings;
|
||||
|
||||
public class JwtVerifier {
|
||||
|
||||
private final KeyProvider keyProvider;
|
||||
|
||||
public JwtVerifier(KeyProvider keyProvider) {
|
||||
this.keyProvider = keyProvider;
|
||||
}
|
||||
|
||||
public JwtToken getVerifiedJwtToken(String encodedJwt) throws BadCredentialsException {
|
||||
try {
|
||||
JwsJwtCompactConsumer jwtConsumer = new JwsJwtCompactConsumer(encodedJwt);
|
||||
JwtToken jwt = jwtConsumer.getJwtToken();
|
||||
JsonWebKey key = keyProvider.getKey(jwt.getJwsHeaders().getKeyId());
|
||||
JwsSignatureVerifier signatureVerifier = getInitializedSignatureVerifier(key);
|
||||
|
||||
boolean signatureValid = jwtConsumer.verifySignatureWith(signatureVerifier);
|
||||
|
||||
if (!signatureValid && Strings.isNullOrEmpty(jwt.getJwsHeaders().getKeyId())) {
|
||||
key = keyProvider.getKeyAfterRefresh(null);
|
||||
signatureVerifier = getInitializedSignatureVerifier(key);
|
||||
signatureValid = jwtConsumer.verifySignatureWith(signatureVerifier);
|
||||
}
|
||||
|
||||
if (!signatureValid) {
|
||||
throw new BadCredentialsException("Invalid JWT signature");
|
||||
}
|
||||
|
||||
validateClaims(jwt);
|
||||
|
||||
return jwt;
|
||||
} catch (JwtException e) {
|
||||
throw new BadCredentialsException(e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
private JwsSignatureVerifier getInitializedSignatureVerifier(JsonWebKey key)
|
||||
throws BadCredentialsException, JwtException {
|
||||
JwsSignatureVerifier result = JwsUtils.getSignatureVerifier(key);
|
||||
|
||||
if (result == null) {
|
||||
throw new BadCredentialsException("Cannot verify JWT");
|
||||
} else {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
private void validateClaims(JwtToken jwt) throws BadCredentialsException, JwtException {
|
||||
JwtClaims claims = jwt.getClaims();
|
||||
|
||||
if (claims != null) {
|
||||
JwtUtils.validateJwtExpiry(claims, 0, false);
|
||||
JwtUtils.validateJwtNotBefore(claims, 0, false);
|
||||
}
|
||||
}
|
||||
}
|
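A short usage sketch of the verifier above; keyProvider stands in for any KeyProvider implementation (for example the SelfRefreshingKeySet further below) and encodedJwt is assumed to be the compact-serialized token taken from the request.

// Sketch only: 'keyProvider' and 'encodedJwt' are placeholders supplied by the caller.
JwtVerifier verifier = new JwtVerifier(keyProvider);
try {
    JwtToken jwt = verifier.getVerifiedJwtToken(encodedJwt);
    // Reached only if the signature, expiry and not-before checks passed.
    String subject = jwt.getClaims().getSubject();
} catch (BadCredentialsException e) {
    // Invalid signature, expired or not-yet-valid token, or no usable key for the kid.
}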
@ -0,0 +1,23 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
import org.apache.cxf.rs.security.jose.jwk.JsonWebKey;
|
||||
|
||||
public interface KeyProvider {
|
||||
public JsonWebKey getKey(String kid) throws AuthenticatorUnavailableException, BadCredentialsException;
|
||||
public JsonWebKey getKeyAfterRefresh(String kid) throws AuthenticatorUnavailableException, BadCredentialsException;
|
||||
}
|
@ -0,0 +1,23 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
import org.apache.cxf.rs.security.jose.jwk.JsonWebKeys;
|
||||
|
||||
@FunctionalInterface
|
||||
public interface KeySetProvider {
|
||||
JsonWebKeys get() throws AuthenticatorUnavailableException;
|
||||
}
|
@ -0,0 +1,225 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.cxf.rs.security.jose.jwk.JsonWebKeys;
|
||||
import org.apache.cxf.rs.security.jose.jwk.JwkUtils;
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.StatusLine;
|
||||
import org.apache.http.client.cache.HttpCacheContext;
|
||||
import org.apache.http.client.cache.HttpCacheStorage;
|
||||
import org.apache.http.client.config.RequestConfig;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.http.impl.client.HttpClients;
|
||||
import org.apache.http.impl.client.cache.BasicHttpCacheStorage;
|
||||
import org.apache.http.impl.client.cache.CacheConfig;
|
||||
import org.apache.http.impl.client.cache.CachingHttpClients;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
||||
import com.amazon.dlic.auth.http.jwt.oidc.json.OpenIdProviderConfiguration;
|
||||
import com.amazon.dlic.util.SettingsBasedSSLConfigurator.SSLConfig;
|
||||
import com.amazon.opendistroforelasticsearch.security.DefaultObjectMapper;
|
||||
|
||||
|
||||
public class KeySetRetriever implements KeySetProvider {
|
||||
private final static Logger log = LogManager.getLogger(KeySetRetriever.class);
|
||||
private static final long CACHE_STATUS_LOG_INTERVAL_MS = 60L * 60L * 1000L;
|
||||
|
||||
private String openIdConnectEndpoint;
|
||||
private SSLConfig sslConfig;
|
||||
private int requestTimeoutMs = 10000;
|
||||
private CacheConfig cacheConfig;
|
||||
private HttpCacheStorage oidcHttpCacheStorage;
|
||||
private int oidcCacheHits = 0;
|
||||
private int oidcCacheMisses = 0;
|
||||
private int oidcCacheHitsValidated = 0;
|
||||
private int oidcCacheModuleResponses = 0;
|
||||
private long oidcRequests = 0;
|
||||
private long lastCacheStatusLog = 0;
|
||||
|
||||
KeySetRetriever(String openIdConnectEndpoint, SSLConfig sslConfig, boolean useCacheForOidConnectEndpoint) {
|
||||
this.openIdConnectEndpoint = openIdConnectEndpoint;
|
||||
this.sslConfig = sslConfig;
|
||||
|
||||
if (useCacheForOidConnectEndpoint) {
|
||||
cacheConfig = CacheConfig.custom().setMaxCacheEntries(10).setMaxObjectSize(1024L * 1024L).build();
|
||||
oidcHttpCacheStorage = new BasicHttpCacheStorage(cacheConfig);
|
||||
}
|
||||
}
|
||||
|
||||
public JsonWebKeys get() throws AuthenticatorUnavailableException {
|
||||
String uri = getJwksUri();
|
||||
|
||||
try (CloseableHttpClient httpClient = createHttpClient(null)) {
|
||||
|
||||
HttpGet httpGet = new HttpGet(uri);
|
||||
|
||||
RequestConfig requestConfig = RequestConfig.custom().setConnectionRequestTimeout(getRequestTimeoutMs())
|
||||
.setConnectTimeout(getRequestTimeoutMs()).setSocketTimeout(getRequestTimeoutMs()).build();
|
||||
|
||||
httpGet.setConfig(requestConfig);
|
||||
|
||||
try (CloseableHttpResponse response = httpClient.execute(httpGet)) {
|
||||
StatusLine statusLine = response.getStatusLine();
|
||||
|
||||
if (statusLine.getStatusCode() < 200 || statusLine.getStatusCode() >= 300) {
|
||||
throw new AuthenticatorUnavailableException("Error while getting " + uri + ": " + statusLine);
|
||||
}
|
||||
|
||||
HttpEntity httpEntity = response.getEntity();
|
||||
|
||||
if (httpEntity == null) {
|
||||
throw new AuthenticatorUnavailableException(
|
||||
"Error while getting " + uri + ": Empty response entity");
|
||||
}
|
||||
|
||||
JsonWebKeys keySet = JwkUtils.readJwkSet(httpEntity.getContent());
|
||||
|
||||
return keySet;
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new AuthenticatorUnavailableException("Error while getting " + uri + ": " + e, e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
String getJwksUri() throws AuthenticatorUnavailableException {
|
||||
|
||||
try (CloseableHttpClient httpClient = createHttpClient(oidcHttpCacheStorage)) {
|
||||
|
||||
HttpGet httpGet = new HttpGet(openIdConnectEndpoint);
|
||||
|
||||
RequestConfig requestConfig = RequestConfig.custom().setConnectionRequestTimeout(getRequestTimeoutMs())
|
||||
.setConnectTimeout(getRequestTimeoutMs()).setSocketTimeout(getRequestTimeoutMs()).build();
|
||||
|
||||
httpGet.setConfig(requestConfig);
|
||||
|
||||
HttpCacheContext httpContext = null;
|
||||
|
||||
if (oidcHttpCacheStorage != null) {
|
||||
httpContext = new HttpCacheContext();
|
||||
}
|
||||
|
||||
try (CloseableHttpResponse response = httpClient.execute(httpGet, httpContext)) {
|
||||
if (httpContext != null) {
|
||||
logCacheResponseStatus(httpContext);
|
||||
}
|
||||
|
||||
StatusLine statusLine = response.getStatusLine();
|
||||
|
||||
if (statusLine.getStatusCode() < 200 || statusLine.getStatusCode() >= 300) {
|
||||
throw new AuthenticatorUnavailableException(
|
||||
"Error while getting " + openIdConnectEndpoint + ": " + statusLine);
|
||||
}
|
||||
|
||||
HttpEntity httpEntity = response.getEntity();
|
||||
|
||||
if (httpEntity == null) {
|
||||
throw new AuthenticatorUnavailableException(
|
||||
"Error while getting " + openIdConnectEndpoint + ": Empty response entity");
|
||||
}
|
||||
|
||||
OpenIdProviderConfiguration parsedEntity = DefaultObjectMapper.objectMapper.readValue(httpEntity.getContent(),
|
||||
OpenIdProviderConfiguration.class);
|
||||
|
||||
return parsedEntity.getJwksUri();
|
||||
|
||||
}
|
||||
|
||||
} catch (IOException e) {
|
||||
throw new AuthenticatorUnavailableException("Error while getting " + openIdConnectEndpoint + ": " + e, e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public int getRequestTimeoutMs() {
|
||||
return requestTimeoutMs;
|
||||
}
|
||||
|
||||
public void setRequestTimeoutMs(int httpTimeoutMs) {
|
||||
this.requestTimeoutMs = httpTimeoutMs;
|
||||
}
|
||||
|
||||
private void logCacheResponseStatus(HttpCacheContext httpContext) {
|
||||
this.oidcRequests++;
|
||||
|
||||
switch (httpContext.getCacheResponseStatus()) {
|
||||
case CACHE_HIT:
|
||||
this.oidcCacheHits++;
|
||||
break;
|
||||
case CACHE_MODULE_RESPONSE:
|
||||
this.oidcCacheModuleResponses++;
|
||||
break;
|
||||
case CACHE_MISS:
|
||||
this.oidcCacheMisses++;
|
||||
break;
|
||||
case VALIDATED:
|
||||
this.oidcCacheHitsValidated++;
|
||||
break;
|
||||
}
|
||||
|
||||
long now = System.currentTimeMillis();
|
||||
|
||||
if (this.oidcRequests >= 2 && now - lastCacheStatusLog > CACHE_STATUS_LOG_INTERVAL_MS) {
|
||||
log.info("Cache status for KeySetRetriever:\noidcCacheHits: " + oidcCacheHits + "\noidcCacheHitsValidated: "
|
||||
+ oidcCacheHitsValidated + "\noidcCacheModuleResponses: " + oidcCacheModuleResponses
|
||||
+ "\noidcCacheMisses: " + oidcCacheMisses);
|
||||
|
||||
lastCacheStatusLog = now;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private CloseableHttpClient createHttpClient(HttpCacheStorage httpCacheStorage) {
|
||||
HttpClientBuilder builder;
|
||||
|
||||
if (httpCacheStorage != null) {
|
||||
builder = CachingHttpClients.custom().setCacheConfig(cacheConfig).setHttpCacheStorage(httpCacheStorage);
|
||||
} else {
|
||||
builder = HttpClients.custom();
|
||||
}
|
||||
|
||||
builder.useSystemProperties();
|
||||
|
||||
if (sslConfig != null) {
|
||||
builder.setSSLSocketFactory(sslConfig.toSSLConnectionSocketFactory());
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
public int getOidcCacheHits() {
|
||||
return oidcCacheHits;
|
||||
}
|
||||
|
||||
public int getOidcCacheMisses() {
|
||||
return oidcCacheMisses;
|
||||
}
|
||||
|
||||
public int getOidcCacheHitsValidated() {
|
||||
return oidcCacheHitsValidated;
|
||||
}
|
||||
|
||||
public int getOidcCacheModuleResponses() {
|
||||
return oidcCacheModuleResponses;
|
||||
}
|
||||
}
|
@ -0,0 +1,324 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.concurrent.Future;
|
||||
import java.util.concurrent.LinkedBlockingQueue;
|
||||
import java.util.concurrent.RejectedExecutionException;
|
||||
import java.util.concurrent.ThreadPoolExecutor;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.apache.cxf.rs.security.jose.jwk.JsonWebKey;
|
||||
import org.apache.cxf.rs.security.jose.jwk.JsonWebKeys;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
||||
import com.google.common.base.Strings;
|
||||
|
||||
public class SelfRefreshingKeySet implements KeyProvider {
|
||||
private static final Logger log = LogManager.getLogger(SelfRefreshingKeySet.class);
|
||||
|
||||
private final KeySetProvider keySetProvider;
|
||||
private final ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(1, 10, 1000, TimeUnit.MILLISECONDS,
|
||||
new LinkedBlockingQueue<Runnable>());
|
||||
private volatile JsonWebKeys jsonWebKeys = new JsonWebKeys();
|
||||
private boolean refreshInProgress = false;
|
||||
private long refreshCount = 0;
|
||||
private long queuedGetCount = 0;
|
||||
private long recentRefreshCount = 0;
|
||||
private long refreshTime = 0;
|
||||
private Throwable lastRefreshFailure = null;
|
||||
private int requestTimeoutMs = 5000;
|
||||
private int queuedThreadTimeoutMs = 2500;
|
||||
private int refreshRateLimitTimeWindowMs = 10000;
|
||||
private int refreshRateLimitCount = 10;
|
||||
|
||||
public SelfRefreshingKeySet(KeySetProvider refreshFunction) {
|
||||
this.keySetProvider = refreshFunction;
|
||||
}
|
||||
|
||||
public JsonWebKey getKey(String kid) throws AuthenticatorUnavailableException, BadCredentialsException {
|
||||
if (Strings.isNullOrEmpty(kid)) {
|
||||
return getKeyWithoutKeyId();
|
||||
} else {
|
||||
return getKeyWithKeyId(kid);
|
||||
}
|
||||
}
|
||||
|
||||
public synchronized JsonWebKey getKeyAfterRefresh(String kid)
|
||||
throws AuthenticatorUnavailableException, BadCredentialsException {
|
||||
JsonWebKey result = getKeyAfterRefreshInternal(kid);
|
||||
|
||||
if (result != null) {
|
||||
return result;
|
||||
} else if (jsonWebKeys.getKeys().size() == 0) {
|
||||
throw new AuthenticatorUnavailableException("No JWK are available from IdP");
|
||||
} else {
|
||||
throw new BadCredentialsException("JWT did not contain KID which is required if IdP provides multiple JWK");
|
||||
}
|
||||
}
|
||||
|
||||
private synchronized JsonWebKey getKeyAfterRefreshInternal(String kid) throws AuthenticatorUnavailableException {
|
||||
if (refreshInProgress) {
|
||||
return waitForRefreshToFinish(kid);
|
||||
} else {
|
||||
return performRefresh(kid);
|
||||
}
|
||||
}
|
||||
|
||||
private JsonWebKey getKeyWithoutKeyId() throws AuthenticatorUnavailableException, BadCredentialsException {
|
||||
List<JsonWebKey> keys = jsonWebKeys.getKeys();
|
||||
|
||||
if (keys == null || keys.size() == 0) {
|
||||
JsonWebKey result = getKeyWithRefresh(null);
|
||||
|
||||
if (result != null) {
|
||||
return result;
|
||||
} else {
|
||||
throw new AuthenticatorUnavailableException("No JWK are available from IdP");
|
||||
}
|
||||
} else if (keys.size() == 1) {
|
||||
return keys.get(0);
|
||||
} else {
|
||||
JsonWebKey result = getKeyWithRefresh(null);
|
||||
|
||||
if (result != null) {
|
||||
return result;
|
||||
} else {
|
||||
throw new BadCredentialsException(
|
||||
"JWT did not contain KID which is required if IdP provides multiple JWK");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private JsonWebKey getKeyWithKeyId(String kid) throws AuthenticatorUnavailableException, BadCredentialsException {
|
||||
JsonWebKey result = jsonWebKeys.getKey(kid);
|
||||
|
||||
if (result != null) {
|
||||
return result;
|
||||
}
|
||||
|
||||
result = getKeyWithRefresh(kid);
|
||||
|
||||
if (result == null) {
|
||||
throw new BadCredentialsException("Unknown kid " + kid);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private synchronized JsonWebKey getKeyWithRefresh(String kid) throws AuthenticatorUnavailableException {
|
||||
|
||||
// Always re-check within synchronized to handle any races
|
||||
|
||||
JsonWebKey result = getKeySimple(kid);
|
||||
|
||||
if (result != null) {
|
||||
return result;
|
||||
}
|
||||
|
||||
return getKeyAfterRefreshInternal(kid);
|
||||
}
|
||||
|
||||
private JsonWebKey getKeySimple(String kid) {
|
||||
if (Strings.isNullOrEmpty(kid)) {
|
||||
List<JsonWebKey> keys = jsonWebKeys.getKeys();
|
||||
|
||||
if (keys != null && keys.size() == 1) {
|
||||
return keys.get(0);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
|
||||
} else {
|
||||
return jsonWebKeys.getKey(kid);
|
||||
}
|
||||
}
|
||||
|
||||
private synchronized JsonWebKey waitForRefreshToFinish(String kid) {
|
||||
queuedGetCount++;
|
||||
long currentRefreshCount = refreshCount;
|
||||
|
||||
try {
|
||||
wait(queuedThreadTimeoutMs);
|
||||
} catch (InterruptedException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
log.debug(e);
|
||||
}
|
||||
|
||||
// Just be optimistic and re-check the key
|
||||
|
||||
JsonWebKey result = getKeySimple(kid);
|
||||
|
||||
if (result != null) {
|
||||
return result;
|
||||
}
|
||||
|
||||
if (refreshInProgress && currentRefreshCount == refreshCount) {
|
||||
// The wait() call returned due to the timeout.
|
||||
throw new AuthenticatorUnavailableException("Authentication backend timed out");
|
||||
} else if (lastRefreshFailure != null) {
|
||||
throw new AuthenticatorUnavailableException("Authentication backend failed", lastRefreshFailure);
|
||||
} else {
|
||||
// Refresh was successful, but we did not get a matching key
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private synchronized JsonWebKey performRefresh(String kid) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("performRefresh({})", kid);
|
||||
}
|
||||
|
||||
final boolean recentRefresh;
|
||||
|
||||
if (System.currentTimeMillis() - refreshTime < refreshRateLimitTimeWindowMs) {
|
||||
recentRefreshCount++;
|
||||
recentRefresh = true;
|
||||
|
||||
if (recentRefreshCount > refreshRateLimitCount) {
|
||||
throw new AuthenticatorUnavailableException("Too many unknown kids recently: " + recentRefreshCount);
|
||||
}
|
||||
} else {
|
||||
recentRefresh = false;
|
||||
}
|
||||
|
||||
refreshInProgress = true;
|
||||
refreshCount++;
|
||||
|
||||
log.info("Performing refresh {}", refreshCount);
|
||||
|
||||
long currentRefreshCount = refreshCount;
|
||||
|
||||
try {
|
||||
|
||||
Future<?> future = threadPoolExecutor.submit(new Runnable() {
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
JsonWebKeys newKeys = keySetProvider.get();
|
||||
|
||||
if (newKeys == null) {
|
||||
throw new RuntimeException("Refresh function " + keySetProvider + " yielded null");
|
||||
}
|
||||
|
||||
log.info("KeySetProvider finished");
|
||||
|
||||
synchronized (SelfRefreshingKeySet.this) {
|
||||
jsonWebKeys = newKeys;
|
||||
refreshInProgress = false;
|
||||
lastRefreshFailure = null;
|
||||
SelfRefreshingKeySet.this.notifyAll();
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
synchronized (SelfRefreshingKeySet.this) {
|
||||
lastRefreshFailure = e;
|
||||
refreshInProgress = false;
|
||||
SelfRefreshingKeySet.this.notifyAll();
|
||||
}
|
||||
log.warn("KeySetProvider threw error", e);
|
||||
} finally {
|
||||
if (!recentRefresh) {
|
||||
recentRefreshCount = 0;
|
||||
refreshTime = System.currentTimeMillis();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
});
|
||||
|
||||
try {
|
||||
wait(requestTimeoutMs);
|
||||
} catch (InterruptedException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
log.debug(e);
|
||||
}
|
||||
|
||||
JsonWebKey result = getKeySimple(kid);
|
||||
|
||||
if (result != null) {
|
||||
return result;
|
||||
}
|
||||
|
||||
if (refreshInProgress && currentRefreshCount == refreshCount) {
|
||||
if (!future.isDone()) {
|
||||
future.cancel(true);
|
||||
}
|
||||
|
||||
lastRefreshFailure = new AuthenticatorUnavailableException("Authentication backend timed out");
|
||||
|
||||
throw new AuthenticatorUnavailableException("Authentication backend timed out");
|
||||
}
|
||||
|
||||
if (lastRefreshFailure != null) {
|
||||
throw new AuthenticatorUnavailableException("Authentication backend failed", lastRefreshFailure);
|
||||
}
|
||||
|
||||
return null;
|
||||
|
||||
} catch (RejectedExecutionException e) {
|
||||
throw new AuthenticatorUnavailableException("Did not try to call authentication backend because of "
|
||||
+ threadPoolExecutor.getActiveCount() + " pending threads", e);
|
||||
} finally {
|
||||
if (refreshInProgress && currentRefreshCount == refreshCount) {
|
||||
refreshInProgress = false;
|
||||
notifyAll();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public int getRequestTimeoutMs() {
|
||||
return requestTimeoutMs;
|
||||
}
|
||||
|
||||
public void setRequestTimeoutMs(int requestTimeoutMs) {
|
||||
this.requestTimeoutMs = requestTimeoutMs;
|
||||
}
|
||||
|
||||
public int getQueuedThreadTimeoutMs() {
|
||||
return queuedThreadTimeoutMs;
|
||||
}
|
||||
|
||||
public void setQueuedThreadTimeoutMs(int queuedThreadTimeoutMs) {
|
||||
this.queuedThreadTimeoutMs = queuedThreadTimeoutMs;
|
||||
}
|
||||
|
||||
public long getRefreshCount() {
|
||||
return refreshCount;
|
||||
}
|
||||
|
||||
public long getQueuedGetCount() {
|
||||
return queuedGetCount;
|
||||
}
|
||||
|
||||
public int getRefreshRateLimitTimeWindowMs() {
|
||||
return refreshRateLimitTimeWindowMs;
|
||||
}
|
||||
|
||||
public void setRefreshRateLimitTimeWindowMs(int refreshRateLimitTimeWindowMs) {
|
||||
this.refreshRateLimitTimeWindowMs = refreshRateLimitTimeWindowMs;
|
||||
}
|
||||
|
||||
public int getRefreshRateLimitCount() {
|
||||
return refreshRateLimitCount;
|
||||
}
|
||||
|
||||
public void setRefreshRateLimitCount(int refreshRateLimitCount) {
|
||||
this.refreshRateLimitCount = refreshRateLimitCount;
|
||||
}
|
||||
}
|
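Because KeySetProvider is a functional interface, the key set above can be fed from any source, not only the KeySetRetriever. A minimal sketch, where loadJwksFromDisk() is a hypothetical helper that returns a JsonWebKeys instance:

// Sketch only: loadJwksFromDisk() is a hypothetical helper, not part of this module.
SelfRefreshingKeySet keySet = new SelfRefreshingKeySet(() -> loadJwksFromDisk());
keySet.setRequestTimeoutMs(5000);
keySet.setQueuedThreadTimeoutMs(2500);

try {
    JsonWebKey key = keySet.getKey("my-kid"); // triggers a refresh if the kid is unknown
    // ... pass the key on to the signature verification step
} catch (BadCredentialsException e) {
    // kid still unknown after the refresh, or no kid although the IdP publishes several keys
} catch (AuthenticatorUnavailableException e) {
    // the refresh timed out or the key set provider failed
}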
@ -0,0 +1,35 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.oidc.json;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
public class OpenIdProviderConfiguration {
|
||||
|
||||
@JsonProperty("jwks_uri")
|
||||
private String jwksUri;
|
||||
|
||||
public String getJwksUri() {
|
||||
return jwksUri;
|
||||
}
|
||||
|
||||
public void setJwksUri(String jwksUri) {
|
||||
this.jwksUri = jwksUri;
|
||||
}
|
||||
|
||||
}
|
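This POJO is what KeySetRetriever binds the IdP's discovery document to. A tiny sketch of that mapping, using the DefaultObjectMapper already imported in KeySetRetriever; the JSON literal is an illustrative placeholder.

// Sketch only: the JSON below is a stand-in for a real discovery document.
String discoveryJson = "{\"jwks_uri\":\"https://idp.example.com/keys\",\"issuer\":\"https://idp.example.com\"}";

try {
    OpenIdProviderConfiguration config =
            DefaultObjectMapper.objectMapper.readValue(discoveryJson, OpenIdProviderConfiguration.class);
    String jwksUri = config.getJwksUri(); // unknown fields such as 'issuer' are ignored
} catch (IOException e) {
    // malformed discovery document
}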
@ -0,0 +1,445 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.kerberos;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.security.AccessController;
|
||||
import java.security.Principal;
|
||||
import java.security.PrivilegedAction;
|
||||
import java.security.PrivilegedActionException;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
import java.util.Base64;
|
||||
|
||||
import javax.security.auth.Subject;
|
||||
import javax.security.auth.login.LoginException;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.env.Environment;
|
||||
//import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.rest.BytesRestResponse;
|
||||
import org.elasticsearch.rest.RestChannel;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.ietf.jgss.GSSContext;
|
||||
import org.ietf.jgss.GSSCredential;
|
||||
import org.ietf.jgss.GSSException;
|
||||
import org.ietf.jgss.GSSManager;
|
||||
import org.ietf.jgss.GSSName;
|
||||
|
||||
import com.amazon.dlic.auth.http.kerberos.util.JaasKrbUtil;
|
||||
import com.amazon.dlic.auth.http.kerberos.util.KrbConstants;
|
||||
import com.amazon.opendistroforelasticsearch.security.auth.HTTPAuthenticator;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.AuthCredentials;
|
||||
import com.google.common.base.Strings;
|
||||
|
||||
public class HTTPSpnegoAuthenticator implements HTTPAuthenticator {
|
||||
|
||||
private static final String EMPTY_STRING = "";
|
||||
|
||||
protected final Logger log = LogManager.getLogger(this.getClass());
|
||||
|
||||
private boolean stripRealmFromPrincipalName;
|
||||
private String acceptorPrincipal;
|
||||
private Path acceptorKeyTabPath;
|
||||
|
||||
public HTTPSpnegoAuthenticator(final Settings settings, final Path configPath) {
|
||||
super();
|
||||
try {
|
||||
final Path configDir = new Environment(settings, configPath).configFile();
|
||||
final String krb5PathSetting = settings.get("opendistro_security.kerberos.krb5_filepath");
|
||||
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
AccessController.doPrivileged(new PrivilegedAction<Void>() {
|
||||
|
||||
@Override
|
||||
public Void run() {
|
||||
|
||||
try {
|
||||
if (settings.getAsBoolean("krb_debug", false)) {
|
||||
JaasKrbUtil.setDebug(true);
|
||||
System.setProperty("sun.security.krb5.debug", "true");
|
||||
System.setProperty("java.security.debug", "gssloginconfig,logincontext,configparser,configfile");
|
||||
System.setProperty("sun.security.spnego.debug", "true");
|
||||
System.out.println("Kerberos debug is enabled");
|
||||
System.err.println("Kerberos debug is enabled");
|
||||
log.info("Kerberos debug is enabled on stdout");
|
||||
} else {
|
||||
log.debug("Kerberos debug is NOT enabled");
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
log.error("Unable to enable krb_debug due to ", e);
|
||||
System.err.println("Unable to enable krb_debug due to "+ExceptionsHelper.stackTrace(e));
|
||||
System.out.println("Unable to enable krb_debug due to "+ExceptionsHelper.stackTrace(e));
|
||||
}
|
||||
|
||||
System.setProperty(KrbConstants.USE_SUBJECT_CREDS_ONLY_PROP, "false");
|
||||
|
||||
String krb5Path = krb5PathSetting;
|
||||
|
||||
if(!Strings.isNullOrEmpty(krb5Path)) {
|
||||
|
||||
if(Paths.get(krb5Path).isAbsolute()) {
|
||||
log.debug("krb5_filepath: {}", krb5Path);
|
||||
System.setProperty(KrbConstants.KRB5_CONF_PROP, krb5Path);
|
||||
} else {
|
||||
krb5Path = configDir.resolve(krb5Path).toAbsolutePath().toString();
|
||||
log.debug("krb5_filepath (resolved from {}): {}", configDir, krb5Path);
|
||||
}
|
||||
|
||||
System.setProperty(KrbConstants.KRB5_CONF_PROP, krb5Path);
|
||||
} else {
|
||||
if(Strings.isNullOrEmpty(System.getProperty(KrbConstants.KRB5_CONF_PROP))) {
|
||||
System.setProperty(KrbConstants.KRB5_CONF_PROP, "/etc/krb5.conf");
|
||||
log.debug("krb5_filepath (was not set or configured, set to default): /etc/krb5.conf");
|
||||
}
|
||||
}
|
||||
|
||||
stripRealmFromPrincipalName = settings.getAsBoolean("strip_realm_from_principal", true);
|
||||
acceptorPrincipal = settings.get("opendistro_security.kerberos.acceptor_principal");
|
||||
final String _acceptorKeyTabPath = settings.get("opendistro_security.kerberos.acceptor_keytab_filepath");
|
||||
|
||||
if(acceptorPrincipal == null || acceptorPrincipal.length() == 0) {
|
||||
log.error("acceptor_principal must not be null or empty. Kerberos authentication will not work");
|
||||
acceptorPrincipal = null;
|
||||
}
|
||||
|
||||
if(_acceptorKeyTabPath == null || _acceptorKeyTabPath.length() == 0) {
|
||||
log.error("opendistro_security.kerberos.acceptor_keytab_filepath must not be null or empty. Kerberos authentication will not work");
|
||||
acceptorKeyTabPath = null;
|
||||
} else {
|
||||
acceptorKeyTabPath = configDir.resolve(settings.get("opendistro_security.kerberos.acceptor_keytab_filepath"));
|
||||
|
||||
if(!Files.exists(acceptorKeyTabPath)) {
|
||||
log.error("Unable to read keytab from {} - Maybe the file does not exist or is not readable. Kerberos authentication will not work", acceptorKeyTabPath);
|
||||
acceptorKeyTabPath = null;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
log.debug("strip_realm_from_principal {}", stripRealmFromPrincipalName);
|
||||
log.debug("acceptor_principal {}", acceptorPrincipal);
|
||||
log.debug("acceptor_keytab_filepath {}", acceptorKeyTabPath);
|
||||
|
||||
} catch (Throwable e) {
|
||||
log.error("Cannot construct HTTPSpnegoAuthenticator due to {}", e.getMessage(), e);
|
||||
log.error("Please make sure you configured 'opendistro_security.kerberos.acceptor_keytab_filepath' realtive to the ES config/ dir!");
|
||||
throw e;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public AuthCredentials extractCredentials(final RestRequest request, ThreadContext threadContext) {
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
AuthCredentials creds = AccessController.doPrivileged(new PrivilegedAction<AuthCredentials>() {
|
||||
@Override
|
||||
public AuthCredentials run() {
|
||||
return extractCredentials0(request);
|
||||
}
|
||||
});
|
||||
|
||||
return creds;
|
||||
}
|
||||
|
||||
private AuthCredentials extractCredentials0(final RestRequest request) {
|
||||
|
||||
if (acceptorPrincipal == null || acceptorKeyTabPath == null) {
|
||||
log.error("Missing acceptor principal or keytab configuration. Kerberos authentication will not work");
|
||||
return null;
|
||||
}
|
||||
|
||||
Principal principal = null;
|
||||
final String authorizationHeader = request.header("Authorization");
|
||||
|
||||
if (authorizationHeader != null) {
|
||||
if (!authorizationHeader.trim().toLowerCase().startsWith("negotiate ")) {
|
||||
log.warn("No 'Negotiate Authorization' header, send 401 and 'WWW-Authenticate Negotiate'");
|
||||
return null;
|
||||
} else {
|
||||
final byte[] decodedNegotiateHeader = Base64.getDecoder().decode(authorizationHeader.substring(10));
|
||||
|
||||
GSSContext gssContext = null;
|
||||
byte[] outToken = null;
|
||||
|
||||
try {
|
||||
|
||||
final Subject subject = JaasKrbUtil.loginUsingKeytab(acceptorPrincipal, acceptorKeyTabPath, false);
|
||||
|
||||
final GSSManager manager = GSSManager.getInstance();
|
||||
final int credentialLifetime = GSSCredential.INDEFINITE_LIFETIME;
|
||||
|
||||
final PrivilegedExceptionAction<GSSCredential> action = new PrivilegedExceptionAction<GSSCredential>() {
|
||||
@Override
|
||||
public GSSCredential run() throws GSSException {
|
||||
return manager.createCredential(null, credentialLifetime, KrbConstants.SPNEGO, GSSCredential.ACCEPT_ONLY);
|
||||
}
|
||||
};
|
||||
gssContext = manager.createContext(Subject.doAs(subject, action));
|
||||
|
||||
outToken = Subject.doAs(subject, new AcceptAction(gssContext, decodedNegotiateHeader));
|
||||
|
||||
if (outToken == null) {
|
||||
log.warn("Ticket validation not successful, outToken is null");
|
||||
return null;
|
||||
}
|
||||
|
||||
principal = Subject.doAs(subject, new AuthenticateAction(log, gssContext, stripRealmFromPrincipalName));
|
||||
|
||||
} catch (final LoginException e) {
|
||||
log.error("Login exception due to", e);
|
||||
return null;
|
||||
} catch (final GSSException e) {
|
||||
log.error("Ticket validation not successful due to", e);
|
||||
return null;
|
||||
} catch (final PrivilegedActionException e) {
|
||||
final Throwable cause = e.getCause();
|
||||
if (cause instanceof GSSException) {
|
||||
log.warn("Service login not successful due to", e);
|
||||
} else {
|
||||
log.error("Service login not successful due to", e);
|
||||
}
|
||||
return null;
|
||||
} finally {
|
||||
if (gssContext != null) {
|
||||
try {
|
||||
gssContext.dispose();
|
||||
} catch (final GSSException e) {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (principal == null) {
|
||||
return new AuthCredentials("_incomplete_", (Object) outToken);
|
||||
}
|
||||
|
||||
|
||||
final String username = ((SimpleUserPrincipal) principal).getName();
|
||||
|
||||
if(username == null || username.length() == 0) {
|
||||
log.error("Got empty or null user from kerberos. Normally this means that you acceptor principal {} does not match the server hostname", acceptorPrincipal);
|
||||
}
|
||||
|
||||
return new AuthCredentials(username, (Object) outToken).markComplete();
|
||||
|
||||
}
|
||||
} else {
|
||||
log.trace("No 'Authorization' header, send 401 and 'WWW-Authenticate Negotiate'");
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean reRequestAuthentication(final RestChannel channel, AuthCredentials creds) {
|
||||
|
||||
final BytesRestResponse wwwAuthenticateResponse;
|
||||
XContentBuilder response = getNegotiateResponseBody();
|
||||
|
||||
if (response != null) {
|
||||
wwwAuthenticateResponse = new BytesRestResponse(RestStatus.UNAUTHORIZED, response);
|
||||
} else {
|
||||
wwwAuthenticateResponse = new BytesRestResponse(RestStatus.UNAUTHORIZED, EMPTY_STRING);
|
||||
}
|
||||
|
||||
if(creds == null || creds.getNativeCredentials() == null) {
|
||||
wwwAuthenticateResponse.addHeader("WWW-Authenticate", "Negotiate");
|
||||
} else {
|
||||
wwwAuthenticateResponse.addHeader("WWW-Authenticate", "Negotiate "+Base64.getEncoder().encodeToString((byte[]) creds.getNativeCredentials()));
|
||||
}
|
||||
channel.sendResponse(wwwAuthenticateResponse);
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getType() {
|
||||
return "spnego";
|
||||
}
|
||||
|
||||
/**
|
||||
* This class gets a gss credential via a privileged action.
|
||||
*/
|
||||
//borrowed from Apache Tomcat 8 http://svn.apache.org/repos/asf/tomcat/tc8.0.x/trunk/
|
||||
private static class AcceptAction implements PrivilegedExceptionAction<byte[]> {
|
||||
|
||||
GSSContext gssContext;
|
||||
|
||||
byte[] decoded;
|
||||
|
||||
AcceptAction(final GSSContext context, final byte[] decodedToken) {
|
||||
this.gssContext = context;
|
||||
this.decoded = decodedToken;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] run() throws GSSException {
|
||||
return gssContext.acceptSecContext(decoded, 0, decoded.length);
|
||||
}
|
||||
}
|
||||
|
||||
//borrowed from Apache Tomcat 8 http://svn.apache.org/repos/asf/tomcat/tc8.0.x/trunk/
|
||||
private static class AuthenticateAction implements PrivilegedAction<Principal> {
|
||||
|
||||
private final Logger logger;
|
||||
private final GSSContext gssContext;
|
||||
private final boolean strip;
|
||||
|
||||
private AuthenticateAction(final Logger logger, final GSSContext gssContext, final boolean strip) {
|
||||
super();
|
||||
this.logger = logger;
|
||||
this.gssContext = gssContext;
|
||||
this.strip = strip;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Principal run() {
|
||||
return new SimpleUserPrincipal(getUsernameFromGSSContext(gssContext, strip, logger));
|
||||
}
|
||||
}
|
||||
|
||||
//borrowed from Apache Tomcat 8 http://svn.apache.org/repos/asf/tomcat/tc8.0.x/trunk/
|
||||
private static String getUsernameFromGSSContext(final GSSContext gssContext, final boolean strip, final Logger logger) {
|
||||
if (gssContext.isEstablished()) {
|
||||
GSSName gssName = null;
|
||||
try {
|
||||
gssName = gssContext.getSrcName();
|
||||
} catch (final GSSException e) {
|
||||
logger.error("Unable to get src name from gss context", e);
|
||||
}
|
||||
|
||||
if (gssName != null) {
|
||||
String name = gssName.toString();
|
||||
return stripRealmName(name, strip);
|
||||
} else {
|
||||
logger.error("GSS name is null");
|
||||
}
|
||||
} else {
|
||||
logger.error("GSS context not established");
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
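// Builds the JSON error body ({"error":{"header":{"WWW-Authenticate":"Negotiate"}}})
// that accompanies the 401 challenge so clients can discover the expected auth scheme.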
private XContentBuilder getNegotiateResponseBody() {
|
||||
try {
|
||||
XContentBuilder negotiateResponseBody = XContentFactory.jsonBuilder();
|
||||
negotiateResponseBody.startObject();
|
||||
negotiateResponseBody.field("error");
|
||||
negotiateResponseBody.startObject();
|
||||
negotiateResponseBody.field("header");
|
||||
negotiateResponseBody.startObject();
|
||||
negotiateResponseBody.field("WWW-Authenticate", "Negotiate");
|
||||
negotiateResponseBody.endObject();
|
||||
negotiateResponseBody.endObject();
|
||||
negotiateResponseBody.endObject();
|
||||
return negotiateResponseBody;
|
||||
} catch (Exception ex) {
|
||||
log.error("Can't construct response body", ex);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
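// Strips the Kerberos realm from a principal name ("user@REALM" -> "user") when strip is true.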
private static String stripRealmName(String name, boolean strip){
|
||||
if (strip && name != null) {
|
||||
final int i = name.indexOf('@');
|
||||
if (i > 0) {
|
||||
// Require a positive index so we don't leave a zero-length name
|
||||
name = name.substring(0, i);
|
||||
}
|
||||
}
|
||||
|
||||
return name;
|
||||
}
|
||||
|
||||
private static class SimpleUserPrincipal implements Principal, Serializable {
|
||||
|
||||
private static final long serialVersionUID = -1;
|
||||
private final String username;
|
||||
|
||||
SimpleUserPrincipal(final String username) {
|
||||
super();
|
||||
this.username = username;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + ((username == null) ? 0 : username.hashCode());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(final Object obj) {
|
||||
if (this == obj) {
|
||||
return true;
|
||||
}
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
final SimpleUserPrincipal other = (SimpleUserPrincipal) obj;
|
||||
if (username == null) {
|
||||
if (other.username != null) {
|
||||
return false;
|
||||
}
|
||||
} else if (!username.equals(other.username)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return this.username;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
final StringBuilder buffer = new StringBuilder();
|
||||
buffer.append("[principal: ");
|
||||
buffer.append(this.username);
|
||||
buffer.append("]");
|
||||
return buffer.toString();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,214 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.kerberos.util;
|
||||
|
||||
//Source: Apache Kerby project
|
||||
//https://directory.apache.org/kerby/
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.security.Principal;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import javax.security.auth.Subject;
|
||||
import javax.security.auth.callback.Callback;
|
||||
import javax.security.auth.callback.CallbackHandler;
|
||||
import javax.security.auth.callback.PasswordCallback;
|
||||
import javax.security.auth.callback.UnsupportedCallbackException;
|
||||
import javax.security.auth.kerberos.KerberosPrincipal;
|
||||
import javax.security.auth.login.AppConfigurationEntry;
|
||||
import javax.security.auth.login.Configuration;
|
||||
import javax.security.auth.login.LoginContext;
|
||||
import javax.security.auth.login.LoginException;
|
||||
|
||||
/**
|
||||
* JAAS utilities for Kerberos login.
|
||||
*/
|
||||
public final class JaasKrbUtil {
|
||||
|
||||
private static boolean debug = false;
|
||||
|
||||
private JaasKrbUtil() {
|
||||
}
|
||||
|
||||
public static void setDebug(final boolean debug) {
|
||||
JaasKrbUtil.debug = debug;
|
||||
}
|
||||
|
||||
public static Subject loginUsingPassword(final String principal, final String password) throws LoginException {
|
||||
final Set<Principal> principals = new HashSet<Principal>();
|
||||
principals.add(new KerberosPrincipal(principal));
|
||||
|
||||
final Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
|
||||
|
||||
final Configuration conf = usePassword(principal);
|
||||
final String confName = "PasswordConf";
|
||||
final CallbackHandler callback = new KrbCallbackHandler(principal, password);
|
||||
final LoginContext loginContext = new LoginContext(confName, subject, callback, conf);
|
||||
loginContext.login();
|
||||
return loginContext.getSubject();
|
||||
}
|
||||
|
||||
public static Subject loginUsingTicketCache(final String principal, final Path cachePath) throws LoginException {
|
||||
final Set<Principal> principals = new HashSet<Principal>();
|
||||
principals.add(new KerberosPrincipal(principal));
|
||||
|
||||
final Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
|
||||
|
||||
final Configuration conf = useTicketCache(principal, cachePath);
|
||||
final String confName = "TicketCacheConf";
|
||||
final LoginContext loginContext = new LoginContext(confName, subject, null, conf);
|
||||
loginContext.login();
|
||||
return loginContext.getSubject();
|
||||
}
|
||||
|
||||
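// Example (hypothetical principal and keytab path) of acquiring an acceptor Subject for SPNEGO:
//   Subject serviceSubject = JaasKrbUtil.loginUsingKeytab(
//       "HTTP/es-node.example.com@EXAMPLE.COM", Paths.get("/etc/es/http.keytab"), false);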
public static Subject loginUsingKeytab(final String principal, final Path keytabPath, final boolean initiator) throws LoginException {
|
||||
final Set<Principal> principals = new HashSet<Principal>();
|
||||
principals.add(new KerberosPrincipal(principal));
|
||||
|
||||
final Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
|
||||
|
||||
final Configuration conf = useKeytab(principal, keytabPath, initiator);
|
||||
final String confName = "KeytabConf";
|
||||
final LoginContext loginContext = new LoginContext(confName, subject, null, conf);
|
||||
loginContext.login();
|
||||
return loginContext.getSubject();
|
||||
}
|
||||
|
||||
public static Configuration usePassword(final String principal) {
|
||||
return new PasswordJaasConf(principal);
|
||||
}
|
||||
|
||||
public static Configuration useTicketCache(final String principal, final Path credentialPath) {
|
||||
return new TicketCacheJaasConf(principal, credentialPath);
|
||||
}
|
||||
|
||||
public static Configuration useKeytab(final String principal, final Path keytabPath, final boolean initiator) {
|
||||
return new KeytabJaasConf(principal, keytabPath, initiator);
|
||||
}
|
||||
|
||||
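// Selects the vendor-specific Krb5LoginModule: the IBM JDK ships its own implementation,
// all other JDKs use the Sun/Oracle module.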
private static String getKrb5LoginModuleName() {
|
||||
return System.getProperty("java.vendor").contains("IBM") ? "com.ibm.security.auth.module.Krb5LoginModule"
|
||||
: "com.sun.security.auth.module.Krb5LoginModule";
|
||||
}
|
||||
|
||||
static class KeytabJaasConf extends Configuration {
|
||||
private final String principal;
|
||||
private final Path keytabPath;
|
||||
private final boolean initiator;
|
||||
|
||||
public KeytabJaasConf(final String principal, final Path keytab, final boolean initiator) {
|
||||
this.principal = principal;
|
||||
this.keytabPath = keytab;
|
||||
this.initiator = initiator;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AppConfigurationEntry[] getAppConfigurationEntry(final String name) {
|
||||
final Map<String, String> options = new HashMap<String, String>();
|
||||
options.put("keyTab", keytabPath.toAbsolutePath().toString());
|
||||
options.put("principal", principal);
|
||||
options.put("useKeyTab", "true");
|
||||
options.put("storeKey", "true");
|
||||
options.put("doNotPrompt", "true");
|
||||
options.put("renewTGT", "false");
|
||||
options.put("refreshKrb5Config", "true");
|
||||
options.put("isInitiator", String.valueOf(initiator));
|
||||
options.put("debug", String.valueOf(debug));
|
||||
|
||||
return new AppConfigurationEntry[] { new AppConfigurationEntry(getKrb5LoginModuleName(),
|
||||
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, options) };
|
||||
}
|
||||
}
|
||||
|
||||
static class TicketCacheJaasConf extends Configuration {
|
||||
private final String principal;
|
||||
private final Path clientCredentialPath;
|
||||
|
||||
public TicketCacheJaasConf(final String principal, final Path clientCredentialPath) {
|
||||
this.principal = principal;
|
||||
this.clientCredentialPath = clientCredentialPath;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AppConfigurationEntry[] getAppConfigurationEntry(final String name) {
|
||||
final Map<String, String> options = new HashMap<String, String>();
|
||||
options.put("principal", principal);
|
||||
options.put("storeKey", "false");
|
||||
options.put("doNotPrompt", "false");
|
||||
options.put("useTicketCache", "true");
|
||||
options.put("renewTGT", "true");
|
||||
options.put("refreshKrb5Config", "true");
|
||||
options.put("isInitiator", "true");
|
||||
options.put("ticketCache", clientCredentialPath.toAbsolutePath().toString());
|
||||
options.put("debug", String.valueOf(debug));
|
||||
|
||||
return new AppConfigurationEntry[] { new AppConfigurationEntry(getKrb5LoginModuleName(),
|
||||
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, options) };
|
||||
}
|
||||
}
|
||||
|
||||
static class PasswordJaasConf extends Configuration {
|
||||
private final String principal;
|
||||
|
||||
public PasswordJaasConf(final String principal) {
|
||||
this.principal = principal;
|
||||
}
|
||||
|
||||
@Override
|
||||
public AppConfigurationEntry[] getAppConfigurationEntry(final String name) {
|
||||
final Map<String, String> options = new HashMap<>();
|
||||
options.put("principal", principal);
|
||||
options.put("storeKey", "true");
|
||||
options.put("useTicketCache", "true");
|
||||
options.put("useKeyTab", "false");
|
||||
options.put("renewTGT", "true");
|
||||
options.put("refreshKrb5Config", "true");
|
||||
options.put("isInitiator", "true");
|
||||
options.put("debug", String.valueOf(debug));
|
||||
|
||||
return new AppConfigurationEntry[] { new AppConfigurationEntry(getKrb5LoginModuleName(),
|
||||
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, options) };
|
||||
}
|
||||
}
|
||||
|
||||
public static class KrbCallbackHandler implements CallbackHandler {
|
||||
private final String principal;
|
||||
private final String password;
|
||||
|
||||
public KrbCallbackHandler(final String principal, final String password) {
|
||||
this.principal = principal;
|
||||
this.password = password;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void handle(final Callback[] callbacks) throws IOException, UnsupportedCallbackException {
|
||||
for (int i = 0; i < callbacks.length; i++) {
|
||||
if (callbacks[i] instanceof PasswordCallback) {
|
||||
final PasswordCallback pc = (PasswordCallback) callbacks[i];
|
||||
if (pc.getPrompt().contains(principal)) {
|
||||
pc.setPassword(password.toCharArray());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,43 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.kerberos.util;
|
||||
|
||||
import org.ietf.jgss.GSSException;
|
||||
import org.ietf.jgss.Oid;
|
||||
|
||||
public final class KrbConstants {
|
||||
|
||||
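// 1.3.6.1.5.5.2 is the SPNEGO mechanism OID; a GSSException is not expected for this constant
// literal, so it is swallowed and SPNEGO would simply remain null if it ever occurred.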
static {
|
||||
Oid spnegoTmp = null;
|
||||
try {
|
||||
spnegoTmp = new Oid("1.3.6.1.5.5.2");
|
||||
} catch (final GSSException e) {
|
||||
|
||||
}
|
||||
SPNEGO = spnegoTmp;
|
||||
}
|
||||
|
||||
public static final Oid SPNEGO;
|
||||
public static final String KRB5_CONF_PROP = "java.security.krb5.conf";
|
||||
public static final String JAAS_LOGIN_CONF_PROP = "java.security.auth.login.config";
|
||||
public static final String USE_SUBJECT_CREDS_ONLY_PROP = "javax.security.auth.useSubjectCredsOnly";
|
||||
public static final String NEGOTIATE = "Negotiate";
|
||||
public static final String WWW_AUTHENTICATE = "WWW-Authenticate";
|
||||
|
||||
private KrbConstants() {
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,469 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.saml;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.URI;
|
||||
import java.net.URISyntaxException;
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedActionException;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import javax.xml.parsers.ParserConfigurationException;
|
||||
import javax.xml.xpath.XPathExpressionException;
|
||||
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.cxf.jaxrs.json.basic.JsonMapObjectReaderWriter;
|
||||
import org.apache.cxf.rs.security.jose.jwk.JsonWebKey;
|
||||
import org.apache.cxf.rs.security.jose.jwk.KeyType;
|
||||
import org.apache.cxf.rs.security.jose.jwk.PublicKeyUse;
|
||||
import org.apache.cxf.rs.security.jose.jws.JwsUtils;
|
||||
import org.apache.cxf.rs.security.jose.jwt.JoseJwtProducer;
|
||||
import org.apache.cxf.rs.security.jose.jwt.JwtClaims;
|
||||
import org.apache.cxf.rs.security.jose.jwt.JwtToken;
|
||||
import org.apache.cxf.rs.security.jose.jwt.JwtUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.ElasticsearchSecurityException;
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.rest.BytesRestResponse;
|
||||
import org.elasticsearch.rest.RestChannel;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.joda.time.DateTime;
|
||||
import org.xml.sax.SAXException;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.DefaultObjectMapper;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.api.AuthTokenProcessorAction;
|
||||
import com.fasterxml.jackson.core.JsonParseException;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import com.google.common.base.Strings;
|
||||
import com.onelogin.saml2.authn.SamlResponse;
|
||||
import com.onelogin.saml2.exception.SettingsException;
|
||||
import com.onelogin.saml2.exception.ValidationError;
|
||||
import com.onelogin.saml2.settings.Saml2Settings;
|
||||
import com.onelogin.saml2.util.Util;
|
||||
|
||||
class AuthTokenProcessorHandler {
|
||||
private static final Logger log = LogManager.getLogger(AuthTokenProcessorHandler.class);
|
||||
private static final Logger token_log = LogManager.getLogger("com.amazon.dlic.auth.http.saml.Token");
|
||||
private static final Pattern EXPIRY_SETTINGS_PATTERN = Pattern.compile("\\s*(\\w+)\\s*(?:\\+\\s*(\\w+))?\\s*");
|
||||
|
||||
private Saml2SettingsProvider saml2SettingsProvider;
|
||||
private JoseJwtProducer jwtProducer;
|
||||
private String jwtSubjectKey;
|
||||
private String jwtRolesKey;
|
||||
private String samlSubjectKey;
|
||||
private String samlRolesKey;
|
||||
private String samlRolesSeparator;
|
||||
private String kibanaRootUrl;
|
||||
|
||||
private long expiryOffset = 0;
|
||||
private ExpiryBaseValue expiryBaseValue = ExpiryBaseValue.AUTO;
|
||||
private JsonWebKey signingKey;
|
||||
private JsonMapObjectReaderWriter jsonMapReaderWriter = new JsonMapObjectReaderWriter();
|
||||
|
||||
AuthTokenProcessorHandler(Settings settings, Settings jwtSettings, Saml2SettingsProvider saml2SettingsProvider)
|
||||
throws Exception {
|
||||
this.saml2SettingsProvider = saml2SettingsProvider;
|
||||
|
||||
this.jwtRolesKey = jwtSettings.get("roles_key", "roles");
|
||||
this.jwtSubjectKey = jwtSettings.get("subject_key", "sub");
|
||||
|
||||
this.samlRolesKey = settings.get("roles_key");
|
||||
this.samlSubjectKey = settings.get("subject_key");
|
||||
this.samlRolesSeparator = settings.get("roles_seperator");
|
||||
this.kibanaRootUrl = settings.get("kibana_url");
|
||||
|
||||
if (samlRolesKey == null || samlRolesKey.length() == 0) {
|
||||
log.warn("roles_key is not configured, will only extract subject from SAML");
|
||||
samlRolesKey = null;
|
||||
}
|
||||
|
||||
if (samlSubjectKey == null || samlSubjectKey.length() == 0) {
|
||||
// If subjectKey == null, get subject from the NameID element.
|
||||
// Thus, this is a valid configuration.
|
||||
samlSubjectKey = null;
|
||||
}
|
||||
|
||||
if (samlRolesSeparator == null || samlRolesSeparator.length() == 0) {
|
||||
samlRolesSeparator = null;
|
||||
}
|
||||
|
||||
this.initJwtExpirySettings(settings);
|
||||
this.signingKey = this.createJwkFromSettings(settings, jwtSettings);
|
||||
|
||||
this.jwtProducer = new JoseJwtProducer();
|
||||
this.jwtProducer.setSignatureProvider(JwsUtils.getSignatureProvider(this.signingKey));
|
||||
|
||||
}
|
||||
|
||||
boolean handle(RestRequest restRequest, RestChannel restChannel) throws Exception {
|
||||
try {
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
return AccessController.doPrivileged(new PrivilegedExceptionAction<Boolean>() {
|
||||
@Override
|
||||
public Boolean run() throws XPathExpressionException, SamlConfigException, IOException,
|
||||
ParserConfigurationException, SAXException, SettingsException {
|
||||
return handleLowLevel(restRequest, restChannel);
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
if (e.getCause() instanceof Exception) {
|
||||
throw (Exception) e.getCause();
|
||||
} else {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
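// Validates the Base64-encoded SAMLResponse against the cached SAML settings and, on success,
// converts it into a signed JWT returned as a "bearer ..." authorization value; returns null
// if the response cannot be validated.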
private AuthTokenProcessorAction.Response handleImpl(RestRequest restRequest, RestChannel restChannel,
|
||||
String samlResponseBase64, String samlRequestId, String acsEndpoint, Saml2Settings saml2Settings)
|
||||
throws XPathExpressionException, ParserConfigurationException, SAXException, IOException,
|
||||
SettingsException {
|
||||
if (token_log.isDebugEnabled()) {
|
||||
try {
|
||||
token_log.debug("SAMLResponse for " + samlRequestId + "\n"
|
||||
+ new String(Util.base64decoder(samlResponseBase64), "UTF-8"));
|
||||
} catch (Exception e) {
|
||||
token_log.warn(
|
||||
"SAMLResponse for " + samlRequestId + " cannot be decoded from base64\n" + samlResponseBase64,
|
||||
e);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
SamlResponse samlResponse = new SamlResponse(saml2Settings, null);
|
||||
samlResponse.setDestinationUrl(acsEndpoint);
|
||||
samlResponse.loadXmlFromBase64(samlResponseBase64);
|
||||
|
||||
if (!samlResponse.isValid(samlRequestId)) {
|
||||
log.warn("Error while validating SAML response in /_opendistro/_security/api/authtoken");
|
||||
return null;
|
||||
}
|
||||
|
||||
AuthTokenProcessorAction.Response responseBody = new AuthTokenProcessorAction.Response();
|
||||
responseBody.setAuthorization("bearer " + this.createJwt(samlResponse));
|
||||
|
||||
return responseBody;
|
||||
} catch (ValidationError e) {
|
||||
log.warn("Error while validating SAML response", e);
|
||||
return null;
|
||||
} catch (Exception e) {
|
||||
log.error("Error while converting SAML to JWT", e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private boolean handleLowLevel(RestRequest restRequest, RestChannel restChannel) throws SamlConfigException,
|
||||
IOException, XPathExpressionException, ParserConfigurationException, SAXException, SettingsException {
|
||||
try {
|
||||
|
||||
if (restRequest.getXContentType() != XContentType.JSON) {
|
||||
throw new ElasticsearchSecurityException(
|
||||
"/_opendistro/_security/api/authtoken expects content with type application/json",
|
||||
RestStatus.UNSUPPORTED_MEDIA_TYPE);
|
||||
|
||||
}
|
||||
|
||||
if (restRequest.method() != Method.POST) {
|
||||
throw new ElasticsearchSecurityException("/_opendistro/_security/api/authtoken expects POST requests",
|
||||
RestStatus.METHOD_NOT_ALLOWED);
|
||||
}
|
||||
|
||||
Saml2Settings saml2Settings = this.saml2SettingsProvider.getCached();
|
||||
|
||||
BytesReference bytesReference = restRequest.requiredContent();
|
||||
|
||||
JsonNode jsonRoot = DefaultObjectMapper.objectMapper.readTree(BytesReference.toBytes(bytesReference));
|
||||
|
||||
if (!(jsonRoot instanceof ObjectNode)) {
|
||||
throw new JsonParseException(null, "Unexpected json format: " + jsonRoot);
|
||||
}
|
||||
|
||||
if (((ObjectNode) jsonRoot).get("SAMLResponse") == null) {
|
||||
log.warn("SAMLResponse is missing from request ");
|
||||
|
||||
throw new ElasticsearchSecurityException("SAMLResponse is missing from request",
|
||||
RestStatus.BAD_REQUEST);
|
||||
|
||||
}
|
||||
|
||||
String samlResponseBase64 = ((ObjectNode) jsonRoot).get("SAMLResponse").asText();
|
||||
String samlRequestId = ((ObjectNode) jsonRoot).get("RequestId") != null
|
||||
? ((ObjectNode) jsonRoot).get("RequestId").textValue()
|
||||
: null;
|
||||
String acsEndpoint = saml2Settings.getSpAssertionConsumerServiceUrl().toString();
|
||||
|
||||
if (((ObjectNode) jsonRoot).get("acsEndpoint") != null
|
||||
&& ((ObjectNode) jsonRoot).get("acsEndpoint").textValue() != null) {
|
||||
acsEndpoint = getAbsoluteAcsEndpoint(((ObjectNode) jsonRoot).get("acsEndpoint").textValue());
|
||||
}
|
||||
|
||||
AuthTokenProcessorAction.Response responseBody = this.handleImpl(restRequest, restChannel,
|
||||
samlResponseBase64, samlRequestId, acsEndpoint, saml2Settings);
|
||||
|
||||
if (responseBody == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
String responseBodyString = DefaultObjectMapper.objectMapper.writeValueAsString(responseBody);
|
||||
|
||||
BytesRestResponse authenticateResponse = new BytesRestResponse(RestStatus.OK, "application/json",
|
||||
responseBodyString);
|
||||
restChannel.sendResponse(authenticateResponse);
|
||||
|
||||
return true;
|
||||
} catch (JsonProcessingException e) {
|
||||
log.warn("Error while parsing JSON for /_opendistro/_security/api/authtoken", e);
|
||||
|
||||
BytesRestResponse authenticateResponse = new BytesRestResponse(RestStatus.BAD_REQUEST,
|
||||
"JSON could not be parsed");
|
||||
restChannel.sendResponse(authenticateResponse);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
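// Builds the JWK used to sign the exchanged JWTs: if exchange_key is configured, an HS512
// octet key is created from that shared secret; otherwise the key properties are read verbatim
// from the jwt.key settings block.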
JsonWebKey createJwkFromSettings(Settings settings, Settings jwtSettings) throws Exception {
|
||||
|
||||
String exchangeKey = settings.get("exchange_key");
|
||||
|
||||
if (!Strings.isNullOrEmpty(exchangeKey)) {
|
||||
|
||||
JsonWebKey jwk = new JsonWebKey();
|
||||
|
||||
jwk.setKeyType(KeyType.OCTET);
|
||||
jwk.setAlgorithm("HS512");
|
||||
jwk.setPublicKeyUse(PublicKeyUse.SIGN);
|
||||
jwk.setProperty("k", exchangeKey);
|
||||
|
||||
return jwk;
|
||||
} else {
|
||||
|
||||
Settings jwkSettings = jwtSettings.getAsSettings("key");
|
||||
|
||||
if (jwkSettings.isEmpty()) {
|
||||
throw new Exception(
|
||||
"Settings for key exchange missing. Please specify at least the option exchange_key with a shared secret.");
|
||||
}
|
||||
|
||||
JsonWebKey jwk = new JsonWebKey();
|
||||
|
||||
for (String key : jwkSettings.keySet()) {
|
||||
jwk.setProperty(key, jwkSettings.get(key));
|
||||
}
|
||||
|
||||
return jwk;
|
||||
}
|
||||
}
|
||||
|
||||
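// Maps the validated SAML assertion onto JWT claims: nbf/exp, the subject, saml_ni (the NameID,
// only when a custom subject_key is configured), saml_nif (NameID format), saml_si (session
// index) and, when roles keys are configured, the extracted roles.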
private String createJwt(SamlResponse samlResponse) throws Exception {
|
||||
JwtClaims jwtClaims = new JwtClaims();
|
||||
JwtToken jwt = new JwtToken(jwtClaims);
|
||||
|
||||
jwtClaims.setNotBefore(System.currentTimeMillis() / 1000);
|
||||
jwtClaims.setExpiryTime(getJwtExpiration(samlResponse));
|
||||
|
||||
jwtClaims.setProperty(this.jwtSubjectKey, this.extractSubject(samlResponse));
|
||||
|
||||
if (this.samlSubjectKey != null) {
|
||||
jwtClaims.setProperty("saml_ni", samlResponse.getNameId());
|
||||
}
|
||||
|
||||
if (samlResponse.getNameIdFormat() != null) {
|
||||
jwtClaims.setProperty("saml_nif", SamlNameIdFormat.getByUri(samlResponse.getNameIdFormat()).getShortName());
|
||||
}
|
||||
|
||||
String sessionIndex = samlResponse.getSessionIndex();
|
||||
|
||||
if (sessionIndex != null) {
|
||||
jwtClaims.setProperty("saml_si", sessionIndex);
|
||||
}
|
||||
|
||||
if (this.samlRolesKey != null && this.jwtRolesKey != null) {
|
||||
String[] roles = this.extractRoles(samlResponse);
|
||||
|
||||
jwtClaims.setProperty(this.jwtRolesKey, roles);
|
||||
}
|
||||
|
||||
String encodedJwt = this.jwtProducer.processJwt(jwt);
|
||||
|
||||
if (token_log.isDebugEnabled()) {
|
||||
token_log.debug("Created JWT: " + encodedJwt + "\n" + jsonMapReaderWriter.toJson(jwt.getJwsHeaders()) + "\n"
|
||||
+ JwtUtils.claimsToJson(jwt.getClaims()));
|
||||
}
|
||||
|
||||
return encodedJwt;
|
||||
}
|
||||
|
||||
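// Derives the JWT expiry: NOW adds the offset to the current time, SESSION adds it to the
// assertion's sessionNotOnOrAfter (failing if absent), and AUTO uses sessionNotOnOrAfter when
// present, otherwise now plus the offset (default one hour).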
private long getJwtExpiration(SamlResponse samlResponse) throws Exception {
|
||||
DateTime sessionNotOnOrAfter = samlResponse.getSessionNotOnOrAfter();
|
||||
|
||||
if (this.expiryBaseValue == ExpiryBaseValue.NOW) {
|
||||
return System.currentTimeMillis() / 1000 + this.expiryOffset;
|
||||
} else if (this.expiryBaseValue == ExpiryBaseValue.SESSION) {
|
||||
if (sessionNotOnOrAfter != null) {
|
||||
return sessionNotOnOrAfter.getMillis() / 1000 + this.expiryOffset;
|
||||
} else {
|
||||
throw new Exception(
|
||||
"Error while determining JWT expiration time: SamlResponse did not contain sessionNotOnOrAfter value");
|
||||
}
|
||||
} else {
|
||||
// AUTO
|
||||
|
||||
if (sessionNotOnOrAfter != null) {
|
||||
return sessionNotOnOrAfter.getMillis() / 1000;
|
||||
} else {
|
||||
return System.currentTimeMillis() / 1000 + (this.expiryOffset > 0 ? this.expiryOffset : 60 * 60);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
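// Parses the jwt.expiry setting. The expected format is "<base> [+ <minutes>]", e.g. "session + 30"
// or "now + 60": the base is one of auto, now or session and the optional offset is in minutes.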
private void initJwtExpirySettings(Settings settings) {
|
||||
String expiry = settings.get("jwt.expiry");
|
||||
|
||||
if (Strings.isNullOrEmpty(expiry)) {
|
||||
return;
|
||||
}
|
||||
|
||||
Matcher matcher = EXPIRY_SETTINGS_PATTERN.matcher(expiry);
|
||||
|
||||
if (!matcher.matches()) {
|
||||
log.error("Invalid value for jwt.expiry: " + expiry + "; using defaults.");
|
||||
return;
|
||||
}
|
||||
|
||||
String baseValue = matcher.group(1);
|
||||
String offset = matcher.group(2);
|
||||
|
||||
if (offset != null && !StringUtils.isNumeric(offset)) {
|
||||
log.error("Invalid offset value for jwt.expiry: " + expiry + "; using defaults.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!Strings.isNullOrEmpty(baseValue)) {
|
||||
try {
|
||||
this.expiryBaseValue = ExpiryBaseValue.valueOf(baseValue.toUpperCase());
|
||||
} catch (IllegalArgumentException e) {
|
||||
log.error("Invalid base value for jwt.expiry: " + expiry + "; using defaults");
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (offset != null) {
|
||||
this.expiryOffset = Integer.parseInt(offset) * 60;
|
||||
}
|
||||
}
|
||||
|
||||
private String extractSubject(SamlResponse samlResponse) throws Exception {
|
||||
if (this.samlSubjectKey == null) {
|
||||
return samlResponse.getNameId();
|
||||
}
|
||||
|
||||
List<String> values = samlResponse.getAttributes().get(this.samlSubjectKey);
|
||||
|
||||
if (values == null || values.size() == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return values.get(0);
|
||||
}
|
||||
|
||||
private String[] extractRoles(SamlResponse samlResponse) throws XPathExpressionException, ValidationError {
|
||||
if (this.samlRolesKey == null) {
|
||||
return new String[0];
|
||||
}
|
||||
|
||||
List<String> values = samlResponse.getAttributes().get(this.samlRolesKey);
|
||||
|
||||
if (values == null || values.size() == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (samlRolesSeparator != null) {
|
||||
values = splitRoles(values);
|
||||
} else {
|
||||
values = trimRoles(values);
|
||||
}
|
||||
|
||||
return values.toArray(new String[values.size()]);
|
||||
}
|
||||
|
||||
private List<String> splitRoles(List<String> values) {
|
||||
ArrayList<String> result = new ArrayList<String>(values.size() * 5);
|
||||
|
||||
for (String role : values) {
|
||||
if (role != null) {
|
||||
for (String splitRole : role.split(samlRolesSeparator)) {
|
||||
result.add(splitRole.trim());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private List<String> trimRoles(List<String> values) {
|
||||
ArrayList<String> result = new ArrayList<>(values);
|
||||
|
||||
for (int i = 0; i < result.size(); i++) {
|
||||
if (result.get(i) != null) {
|
||||
result.set(i, result.get(i).trim());
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private String getAbsoluteAcsEndpoint(String acsEndpoint) {
|
||||
try {
|
||||
URI acsEndpointUri = new URI(acsEndpoint);
|
||||
|
||||
if (acsEndpointUri.isAbsolute()) {
|
||||
return acsEndpoint;
|
||||
} else {
|
||||
return new URI(this.kibanaRootUrl).resolve(acsEndpointUri).toString();
|
||||
}
|
||||
} catch (URISyntaxException e) {
|
||||
log.error("Could not parse URI for acsEndpoint: " + acsEndpoint);
|
||||
return acsEndpoint;
|
||||
}
|
||||
}
|
||||
|
||||
private enum ExpiryBaseValue {
|
||||
AUTO, NOW, SESSION
|
||||
}
|
||||
|
||||
public JsonWebKey getSigningKey() {
|
||||
return signingKey;
|
||||
}
|
||||
}
|
@ -0,0 +1,431 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.saml;
|
||||
|
||||
import java.net.URL;
|
||||
import java.nio.file.Path;
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivateKey;
|
||||
import java.security.PrivilegedActionException;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
|
||||
import org.apache.commons.lang3.StringEscapeUtils;
|
||||
import org.apache.cxf.rs.security.jose.jwk.JsonWebKey;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.ElasticsearchSecurityException;
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.rest.BytesRestResponse;
|
||||
import org.elasticsearch.rest.RestChannel;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.opensaml.core.config.InitializationException;
|
||||
import org.opensaml.core.config.InitializationService;
|
||||
import org.opensaml.saml.metadata.resolver.MetadataResolver;
|
||||
import org.opensaml.saml.metadata.resolver.impl.AbstractReloadingMetadataResolver;
|
||||
|
||||
import com.amazon.dlic.auth.http.jwt.AbstractHTTPJwtAuthenticator;
|
||||
import com.amazon.dlic.auth.http.jwt.keybyoidc.AuthenticatorUnavailableException;
|
||||
import com.amazon.dlic.auth.http.jwt.keybyoidc.BadCredentialsException;
|
||||
import com.amazon.dlic.auth.http.jwt.keybyoidc.KeyProvider;
|
||||
import com.amazon.opendistroforelasticsearch.security.auth.Destroyable;
|
||||
import com.amazon.opendistroforelasticsearch.security.auth.HTTPAuthenticator;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.PemKeyReader;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.AuthCredentials;
|
||||
import com.google.common.base.Strings;
|
||||
import com.onelogin.saml2.authn.AuthnRequest;
|
||||
import com.onelogin.saml2.logout.LogoutRequest;
|
||||
import com.onelogin.saml2.settings.Saml2Settings;
|
||||
import com.onelogin.saml2.util.Constants;
|
||||
import com.onelogin.saml2.util.Util;
|
||||
|
||||
import net.shibboleth.utilities.java.support.component.ComponentInitializationException;
|
||||
import net.shibboleth.utilities.java.support.component.DestructableComponent;
|
||||
|
||||
public class HTTPSamlAuthenticator implements HTTPAuthenticator, Destroyable {
|
||||
protected final static Logger log = LogManager.getLogger(HTTPSamlAuthenticator.class);
|
||||
private static boolean openSamlInitialized = false;
|
||||
|
||||
private String subjectKey;
|
||||
private String rolesKey;
|
||||
private String kibanaRootUrl;
|
||||
private String idpMetadataUrl;
|
||||
private String idpMetadataFile;
|
||||
private String spSignatureAlgorithm;
|
||||
private Boolean useForceAuthn;
|
||||
private PrivateKey spSignaturePrivateKey;
|
||||
private Saml2SettingsProvider saml2SettingsProvider;
|
||||
private MetadataResolver metadataResolver;
|
||||
private AuthTokenProcessorHandler authTokenProcessorHandler;
|
||||
private HTTPJwtAuthenticator httpJwtAuthenticator;
|
||||
private Settings jwtSettings;
|
||||
|
||||
public HTTPSamlAuthenticator(final Settings settings, final Path configPath) {
|
||||
try {
|
||||
ensureOpenSamlInitialization();
|
||||
|
||||
rolesKey = settings.get("roles_key");
|
||||
subjectKey = settings.get("subject_key");
|
||||
kibanaRootUrl = settings.get("kibana_url");
|
||||
idpMetadataUrl = settings.get("idp.metadata_url");
|
||||
idpMetadataFile = settings.get("idp.metadata_file");
|
||||
spSignatureAlgorithm = settings.get("sp.signature_algorithm", Constants.RSA_SHA256);
|
||||
spSignaturePrivateKey = getSpSignaturePrivateKey(settings, configPath);
|
||||
useForceAuthn = settings.getAsBoolean("sp.forceAuthn", null);
|
||||
|
||||
if (rolesKey == null || rolesKey.length() == 0) {
|
||||
log.warn("roles_key is not configured, will only extract subject from SAML");
|
||||
rolesKey = null;
|
||||
}
|
||||
|
||||
if (subjectKey == null || subjectKey.length() == 0) {
|
||||
// If subjectKey == null, get subject from the NameID element.
|
||||
// Thus, this is a valid configuration.
|
||||
subjectKey = null;
|
||||
}
|
||||
|
||||
if (kibanaRootUrl == null) {
|
||||
throw new Exception("kibana_url is unconfigured");
|
||||
}
|
||||
|
||||
if (idpMetadataUrl == null && idpMetadataFile == null) {
|
||||
throw new Exception("idp.metadata_url and idp.metadata_file are unconfigured");
|
||||
}
|
||||
|
||||
this.metadataResolver = createMetadataResolver(settings, configPath);
|
||||
|
||||
this.saml2SettingsProvider = new Saml2SettingsProvider(settings, this.metadataResolver);
|
||||
this.saml2SettingsProvider.getCached();
|
||||
|
||||
this.jwtSettings = this.createJwtAuthenticatorSettings(settings);
|
||||
|
||||
this.authTokenProcessorHandler = new AuthTokenProcessorHandler(settings, jwtSettings,
|
||||
this.saml2SettingsProvider);
|
||||
|
||||
this.httpJwtAuthenticator = new HTTPJwtAuthenticator(this.jwtSettings, configPath);
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("Error creating HTTPSamlAuthenticator: " + e + ". SAML authentication will not work", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public AuthCredentials extractCredentials(RestRequest restRequest, ThreadContext threadContext)
|
||||
throws ElasticsearchSecurityException {
|
||||
if ("/_opendistro/_security/api/authtoken".equals(restRequest.path())) {
|
||||
return null;
|
||||
}
|
||||
|
||||
AuthCredentials authCredentials = this.httpJwtAuthenticator.extractCredentials(restRequest, threadContext);
|
||||
|
||||
if ("/_opendistro/_security/authinfo".equals(restRequest.path())) {
|
||||
this.initLogoutUrl(restRequest, threadContext, authCredentials);
|
||||
}
|
||||
|
||||
return authCredentials;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getType() {
|
||||
return "saml";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean reRequestAuthentication(RestChannel restChannel, AuthCredentials authCredentials) {
|
||||
try {
|
||||
RestRequest restRequest = restChannel.request();
|
||||
|
||||
if ("/_opendistro/_security/api/authtoken".equals(restRequest.path())
|
||||
&& this.authTokenProcessorHandler.handle(restRequest, restChannel)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
Saml2Settings saml2Settings = this.saml2SettingsProvider.getCached();
|
||||
BytesRestResponse authenticateResponse = new BytesRestResponse(RestStatus.UNAUTHORIZED, "");
|
||||
|
||||
authenticateResponse.addHeader("WWW-Authenticate", getWwwAuthenticateHeader(saml2Settings));
|
||||
|
||||
restChannel.sendResponse(authenticateResponse);
|
||||
|
||||
return true;
|
||||
} catch (Exception e) {
|
||||
log.error("Error in reRequestAuthentication()", e);
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
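// Builds the custom "X-Security-IdP" challenge carrying the IdP redirect-binding location for a
// freshly built AuthnRequest plus its request ID, so a front end such as Kibana can redirect
// the user to the IdP.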
private String getWwwAuthenticateHeader(Saml2Settings saml2Settings) throws Exception {
|
||||
AuthnRequest authnRequest = this.buildAuthnRequest(saml2Settings);
|
||||
|
||||
return "X-Security-IdP realm=\"Open Distro Security\" location=\""
|
||||
+ StringEscapeUtils.escapeJava(getSamlRequestRedirectBindingLocation(IdpEndpointType.SSO, saml2Settings,
|
||||
authnRequest.getEncodedAuthnRequest(true)))
|
||||
+ "\" requestId=\"" + StringEscapeUtils.escapeJava(authnRequest.getId()) + "\"";
|
||||
}
|
||||
|
||||
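// If sp.forceAuthn is not explicitly configured, force re-authentication whenever the IdP offers
// no Single Logout service, so users must re-enter their password after logging out.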
private AuthnRequest buildAuthnRequest(Saml2Settings saml2Settings) {
|
||||
boolean forceAuthn = false;
|
||||
|
||||
if (this.useForceAuthn != null) {
|
||||
forceAuthn = this.useForceAuthn.booleanValue();
|
||||
} else {
|
||||
if (!this.isSingleLogoutAvailable(saml2Settings)) {
|
||||
forceAuthn = true;
|
||||
}
|
||||
}
|
||||
|
||||
return new AuthnRequest(saml2Settings, forceAuthn, false, true);
|
||||
}
|
||||
|
||||
private PrivateKey getSpSignaturePrivateKey(Settings settings, Path configPath) throws Exception {
|
||||
try {
|
||||
PrivateKey result = PemKeyReader.loadKeyFromStream(settings.get("sp.signature_private_key_password"),
|
||||
PemKeyReader.resolveStream("sp.signature_private_key", settings));
|
||||
|
||||
if (result == null) {
|
||||
result = PemKeyReader.loadKeyFromFile(settings.get("sp.signature_private_key_password"),
|
||||
PemKeyReader.resolve("sp.signature_private_key_filepath", settings, configPath, false));
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (Exception e) {
|
||||
throw new Exception("Invalid value for sp.signature_private_key", e);
|
||||
}
|
||||
}
|
||||
|
||||
private URL getIdpUrl(IdpEndpointType endpointType, Saml2Settings saml2Settings) {
|
||||
if (endpointType == IdpEndpointType.SSO) {
|
||||
return saml2Settings.getIdpSingleSignOnServiceUrl();
|
||||
} else {
|
||||
return saml2Settings.getIdpSingleLogoutServiceUrl();
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isSingleLogoutAvailable(Saml2Settings saml2Settings) {
|
||||
return saml2Settings.getIdpSingleLogoutServiceUrl() != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void destroy() {
|
||||
if (this.metadataResolver instanceof DestructableComponent) {
|
||||
((DestructableComponent) this.metadataResolver).destroy();
|
||||
}
|
||||
}
|
||||
|
||||
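// One-time OpenSAML bootstrap, run as a privileged action with the thread context classloader
// temporarily switched to the OpenSAML classloader so its provider initializers resolve correctly.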
static void ensureOpenSamlInitialization() {
|
||||
if (openSamlInitialized) {
|
||||
return;
|
||||
}
|
||||
|
||||
SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
try {
|
||||
AccessController.doPrivileged(new PrivilegedExceptionAction<Void>() {
|
||||
@Override
|
||||
public Void run() throws InitializationException {
|
||||
|
||||
Thread thread = Thread.currentThread();
|
||||
ClassLoader originalClassLoader = thread.getContextClassLoader();
|
||||
|
||||
try {
|
||||
|
||||
thread.setContextClassLoader(InitializationService.class.getClassLoader());
|
||||
|
||||
InitializationService.initialize();
|
||||
|
||||
new org.opensaml.saml.config.XMLObjectProviderInitializer().init();
|
||||
new org.opensaml.saml.config.SAMLConfigurationInitializer().init();
|
||||
new org.opensaml.xmlsec.config.XMLObjectProviderInitializer().init();
|
||||
} finally {
|
||||
thread.setContextClassLoader(originalClassLoader);
|
||||
}
|
||||
|
||||
openSamlInitialized = true;
|
||||
return null;
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
throw new RuntimeException(e.getCause());
|
||||
}
|
||||
}
|
||||
|
||||
private AbstractReloadingMetadataResolver createMetadataResolver(final Settings settings, final Path configPath)
|
||||
throws Exception {
|
||||
final AbstractReloadingMetadataResolver metadataResolver;
|
||||
|
||||
if (idpMetadataUrl != null) {
|
||||
metadataResolver = new SamlHTTPMetadataResolver(settings, configPath);
|
||||
} else {
|
||||
metadataResolver = new SamlFilesystemMetadataResolver(settings, configPath);
|
||||
}
|
||||
|
||||
SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
try {
|
||||
AccessController.doPrivileged(new PrivilegedExceptionAction<Void>() {
|
||||
@Override
|
||||
public Void run() throws ComponentInitializationException {
|
||||
metadataResolver.initialize();
|
||||
return null;
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
if (e.getCause() instanceof ComponentInitializationException) {
|
||||
throw (ComponentInitializationException) e.getCause();
|
||||
} else {
|
||||
throw new RuntimeException(e.getCause());
|
||||
}
|
||||
}
|
||||
|
||||
return metadataResolver;
|
||||
|
||||
}
|
||||
|
||||
private Settings createJwtAuthenticatorSettings(Settings settings) {
|
||||
Settings.Builder settingsBuilder = Settings.builder();
|
||||
Settings jwtSettings = settings.getAsSettings("jwt");
|
||||
|
||||
settingsBuilder.put(jwtSettings);
|
||||
|
||||
if (jwtSettings.get("roles_key") == null && settings.get("roles_key") != null) {
|
||||
settingsBuilder.put("roles_key", "roles");
|
||||
}
|
||||
|
||||
if (jwtSettings.get("subject_key") == null) {
|
||||
settingsBuilder.put("subject_key", "sub");
|
||||
}
|
||||
|
||||
return settingsBuilder.build();
|
||||
}
|
||||
|
||||
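// Builds the IdP single-logout redirect URL from the JWT claims mirrored into the credentials
// (NameID, NameID format and session index); returns null when SLO is unavailable or on error.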
String buildLogoutUrl(AuthCredentials authCredentials) {
|
||||
try {
|
||||
if (authCredentials == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Saml2Settings saml2Settings = this.saml2SettingsProvider.getCached();
|
||||
|
||||
if (!isSingleLogoutAvailable(saml2Settings)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
String nameIdClaim = this.subjectKey == null ? "sub" : "saml_ni";
|
||||
String nameId = authCredentials.getAttributes().get("attr.jwt." + nameIdClaim);
|
||||
String nameIdFormat = SamlNameIdFormat
|
||||
.getByShortName(authCredentials.getAttributes().get("attr.jwt.saml_nif")).getUri();
|
||||
String sessionIndex = authCredentials.getAttributes().get("attr.jwt.saml_si");
|
||||
|
||||
LogoutRequest logoutRequest = new LogoutRequest(saml2Settings, null, nameId, sessionIndex, nameIdFormat);
|
||||
|
||||
return getSamlRequestRedirectBindingLocation(IdpEndpointType.SLO, saml2Settings,
|
||||
logoutRequest.getEncodedLogoutRequest(true));
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("Error while creating logout URL. Logout will be not available", e);
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void initLogoutUrl(RestRequest restRequest, ThreadContext threadContext, AuthCredentials authCredentials) {
|
||||
threadContext.putTransient(ConfigConstants.SSO_LOGOUT_URL, buildLogoutUrl(authCredentials));
|
||||
}
|
||||
|
||||
private String getSamlRequestRedirectBindingLocation(IdpEndpointType idpEndpointType, Saml2Settings saml2Settings,
|
||||
String samlRequest) throws Exception {
|
||||
|
||||
URL idpUrl = getIdpUrl(idpEndpointType, saml2Settings);
|
||||
|
||||
if (Strings.isNullOrEmpty(idpUrl.getQuery())) {
|
||||
return getIdpUrl(idpEndpointType, saml2Settings) + "?" + this.getSamlRequestQueryString(samlRequest);
|
||||
} else {
|
||||
return getIdpUrl(idpEndpointType, saml2Settings) + "&" + this.getSamlRequestQueryString(samlRequest);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
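// Builds the HTTP-Redirect binding query string: SAMLRequest plus, when an SP signing key is
// configured, SigAlg and a detached Signature computed over the query string.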
private String getSamlRequestQueryString(String samlRequest) throws Exception {
|
||||
|
||||
if (this.spSignaturePrivateKey == null) {
|
||||
return "SAMLRequest=" + Util.urlEncoder(samlRequest);
|
||||
}
|
||||
|
||||
String queryString = "SAMLRequest=" + Util.urlEncoder(samlRequest) + "&SigAlg="
|
||||
+ Util.urlEncoder(this.spSignatureAlgorithm);
|
||||
|
||||
String signature = getSamlRequestQueryStringSignature(queryString);
|
||||
|
||||
queryString += "&Signature=" + Util.urlEncoder(signature);
|
||||
|
||||
return queryString;
|
||||
}
|
||||
|
||||
private String getSamlRequestQueryStringSignature(String samlRequestQueryString) throws Exception {
|
||||
try {
|
||||
return Util.base64encoder(
|
||||
Util.sign(samlRequestQueryString, this.spSignaturePrivateKey, this.spSignatureAlgorithm));
|
||||
} catch (Exception e) {
|
||||
throw new Exception("Error while signing SAML request", e);
|
||||
}
|
||||
}
|
||||
|
||||
class HTTPJwtAuthenticator extends AbstractHTTPJwtAuthenticator {
|
||||
|
||||
public HTTPJwtAuthenticator(Settings settings, Path configPath) {
|
||||
super(settings, configPath);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getType() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected KeyProvider initKeyProvider(Settings settings, Path configPath) throws Exception {
|
||||
return new KeyProvider() {
|
||||
|
||||
@Override
|
||||
public JsonWebKey getKeyAfterRefresh(String kid)
|
||||
throws AuthenticatorUnavailableException, BadCredentialsException {
|
||||
return authTokenProcessorHandler.getSigningKey();
|
||||
}
|
||||
|
||||
@Override
|
||||
public JsonWebKey getKey(String kid) throws AuthenticatorUnavailableException, BadCredentialsException {
|
||||
return authTokenProcessorHandler.getSigningKey();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private enum IdpEndpointType {
|
||||
SSO, SLO
|
||||
}
|
||||
}
|
@ -0,0 +1,312 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.saml;
|
||||
|
||||
import java.util.AbstractMap;
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.joda.time.DateTime;
|
||||
import org.opensaml.core.criterion.EntityIdCriterion;
|
||||
import org.opensaml.saml.metadata.resolver.MetadataResolver;
|
||||
import org.opensaml.saml.metadata.resolver.RefreshableMetadataResolver;
|
||||
import org.opensaml.saml.saml2.metadata.EntityDescriptor;
|
||||
import org.opensaml.saml.saml2.metadata.IDPSSODescriptor;
|
||||
import org.opensaml.saml.saml2.metadata.KeyDescriptor;
|
||||
import org.opensaml.saml.saml2.metadata.SingleLogoutService;
|
||||
import org.opensaml.saml.saml2.metadata.SingleSignOnService;
|
||||
import org.opensaml.security.credential.UsageType;
|
||||
import org.opensaml.xmlsec.signature.X509Certificate;
|
||||
import org.opensaml.xmlsec.signature.X509Data;
|
||||
|
||||
import com.amazon.dlic.auth.http.jwt.keybyoidc.AuthenticatorUnavailableException;
|
||||
import com.onelogin.saml2.settings.Saml2Settings;
|
||||
import com.onelogin.saml2.settings.SettingsBuilder;
|
||||
|
||||
import net.shibboleth.utilities.java.support.resolver.CriteriaSet;
|
||||
import net.shibboleth.utilities.java.support.resolver.ResolverException;
|
||||
|
||||
public class Saml2SettingsProvider {
|
||||
protected final static Logger log = LogManager.getLogger(Saml2SettingsProvider.class);
|
||||
|
||||
private Settings esSettings;
|
||||
private MetadataResolver metadataResolver;
|
||||
private String idpEntityId;
|
||||
private Saml2Settings cachedSaml2Settings;
|
||||
private DateTime metadataUpdateTime;
|
||||
|
||||
Saml2SettingsProvider(Settings esSettings, MetadataResolver metadataResolver) {
|
||||
this.esSettings = esSettings;
|
||||
this.metadataResolver = metadataResolver;
|
||||
this.idpEntityId = esSettings.get("idp.entity_id");
|
||||
}
|
||||
|
||||
Saml2Settings get() throws SamlConfigException {
|
||||
try {
|
||||
HashMap<String, Object> configProperties = new HashMap<>();
|
||||
|
||||
EntityDescriptor entityDescriptor = this.metadataResolver
|
||||
.resolveSingle(new CriteriaSet(new EntityIdCriterion(this.idpEntityId)));
|
||||
|
||||
if (entityDescriptor == null) {
|
||||
throw new SamlConfigException("Could not find entity descriptor for " + this.idpEntityId);
|
||||
}
|
||||
|
||||
IDPSSODescriptor idpSsoDescriptor = entityDescriptor
|
||||
.getIDPSSODescriptor("urn:oasis:names:tc:SAML:2.0:protocol");
|
||||
|
||||
if (idpSsoDescriptor == null) {
|
||||
throw new SamlConfigException("Could not find IDPSSODescriptor supporting SAML 2.0 in "
|
||||
+ this.idpEntityId + "; role descriptors: " + entityDescriptor.getRoleDescriptors());
|
||||
}
|
||||
|
||||
initIdpEndpoints(idpSsoDescriptor, configProperties);
|
||||
initIdpCerts(idpSsoDescriptor, configProperties);
|
||||
|
||||
initSpEndpoints(configProperties);
|
||||
|
||||
initMisc(configProperties);
|
||||
|
||||
SettingsBuilder settingsBuilder = new SettingsBuilder();
|
||||
|
||||
// TODO allow overriding of IdP metadata?
|
||||
settingsBuilder.fromValues(configProperties);
|
||||
settingsBuilder.fromValues(new SamlSettingsMap(this.esSettings));
|
||||
|
||||
return settingsBuilder.build();
|
||||
} catch (ResolverException e) {
|
||||
throw new AuthenticatorUnavailableException(e);
|
||||
}
|
||||
}
|
||||
|
||||
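// Returns the cached Saml2Settings, rebuilding them whenever the metadata resolver reports a
// newer lastUpdate than the one the cache was built from.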
Saml2Settings getCached() throws SamlConfigException {
|
||||
DateTime tempLastUpdate = null;
|
||||
|
||||
if (this.metadataResolver instanceof RefreshableMetadataResolver && this.isUpdateRequired()) {
|
||||
this.cachedSaml2Settings = null;
|
||||
tempLastUpdate = ((RefreshableMetadataResolver) this.metadataResolver).getLastUpdate();
|
||||
}
|
||||
|
||||
if (this.cachedSaml2Settings == null) {
|
||||
this.cachedSaml2Settings = this.get();
|
||||
this.metadataUpdateTime = tempLastUpdate;
|
||||
}
|
||||
|
||||
return this.cachedSaml2Settings;
|
||||
}
|
||||
|
||||
private boolean isUpdateRequired() {
|
||||
RefreshableMetadataResolver refreshableMetadataResolver = (RefreshableMetadataResolver) this.metadataResolver;
|
||||
|
||||
if (this.cachedSaml2Settings == null || this.metadataUpdateTime == null
|
||||
|| refreshableMetadataResolver.getLastUpdate() == null) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (refreshableMetadataResolver.getLastUpdate().isAfter(this.metadataUpdateTime)) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private void initMisc(HashMap<String, Object> configProperties) {
|
||||
configProperties.put(SettingsBuilder.STRICT_PROPERTY_KEY, true);
|
||||
configProperties.put(SettingsBuilder.SECURITY_REJECT_UNSOLICITED_RESPONSES_WITH_INRESPONSETO, true);
|
||||
}
|
||||
|
||||
private void initSpEndpoints(HashMap<String, Object> configProperties) {
|
||||
configProperties.put(SettingsBuilder.SP_ASSERTION_CONSUMER_SERVICE_URL_PROPERTY_KEY,
|
||||
this.buildKibanaAssertionConsumerEndpoint(this.esSettings.get("kibana_url")));
|
||||
configProperties.put(SettingsBuilder.SP_ASSERTION_CONSUMER_SERVICE_BINDING_PROPERTY_KEY,
|
||||
"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST");
|
||||
configProperties.put(SettingsBuilder.SP_ENTITYID_PROPERTY_KEY, this.esSettings.get("sp.entity_id"));
|
||||
}
|
||||
|
||||
private void initIdpEndpoints(IDPSSODescriptor idpSsoDescriptor, HashMap<String, Object> configProperties)
|
||||
throws SamlConfigException {
|
||||
SingleSignOnService singleSignOnService = this.findSingleSignOnService(idpSsoDescriptor,
|
||||
"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect");
|
||||
|
||||
configProperties.put(SettingsBuilder.IDP_SINGLE_SIGN_ON_SERVICE_URL_PROPERTY_KEY,
|
||||
singleSignOnService.getLocation());
|
||||
configProperties.put(SettingsBuilder.IDP_SINGLE_SIGN_ON_SERVICE_BINDING_PROPERTY_KEY,
|
||||
singleSignOnService.getBinding());
|
||||
configProperties.put(SettingsBuilder.IDP_ENTITYID_PROPERTY_KEY, this.esSettings.get("idp.entity_id"));
|
||||
|
||||
SingleLogoutService singleLogoutService = this.findSingleLogoutService(idpSsoDescriptor,
|
||||
"urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect");
|
||||
|
||||
if (singleLogoutService != null) {
|
||||
configProperties.put(SettingsBuilder.IDP_SINGLE_LOGOUT_SERVICE_URL_PROPERTY_KEY,
|
||||
singleLogoutService.getLocation());
|
||||
configProperties.put(SettingsBuilder.IDP_SINGLE_LOGOUT_SERVICE_BINDING_PROPERTY_KEY,
|
||||
singleLogoutService.getBinding());
|
||||
} else {
|
||||
log.warn(
|
||||
"The IdP does not provide a Single Logout Service. In order to ensure that users have to re-enter their password after logging out, Open Distro Security will issue all SAML authentication requests with a mandatory password input (ForceAuthn=true)");
|
||||
}
|
||||
}
|
||||
|
||||
private void initIdpCerts(IDPSSODescriptor idpSsoDescriptor, HashMap<String, Object> configProperties) {
|
||||
int i = 0;
|
||||
|
||||
for (KeyDescriptor keyDescriptor : idpSsoDescriptor.getKeyDescriptors()) {
|
||||
if (UsageType.SIGNING.equals(keyDescriptor.getUse())
|
||||
|| UsageType.UNSPECIFIED.equals(keyDescriptor.getUse())) {
|
||||
for (X509Data x509data : keyDescriptor.getKeyInfo().getX509Datas()) {
|
||||
for (X509Certificate x509Certificate : x509data.getX509Certificates()) {
|
||||
configProperties.put(SettingsBuilder.IDP_X509CERTMULTI_PROPERTY_KEY + "." + (i++),
|
||||
x509Certificate.getValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private SingleSignOnService findSingleSignOnService(IDPSSODescriptor idpSsoDescriptor, String binding)
|
||||
throws SamlConfigException {
|
||||
for (SingleSignOnService singleSignOnService : idpSsoDescriptor.getSingleSignOnServices()) {
|
||||
if (binding.equals(singleSignOnService.getBinding())) {
|
||||
return singleSignOnService;
|
||||
}
|
||||
}
|
||||
|
||||
throw new SamlConfigException("Could not find SingleSignOnService endpoint for binding " + binding
|
||||
+ "; available services: " + idpSsoDescriptor.getSingleSignOnServices());
|
||||
}
|
||||
|
||||
private SingleLogoutService findSingleLogoutService(IDPSSODescriptor idpSsoDescriptor, String binding)
|
||||
throws SamlConfigException {
|
||||
for (SingleLogoutService singleLogoutService : idpSsoDescriptor.getSingleLogoutServices()) {
|
||||
if (binding.equals(singleLogoutService.getBinding())) {
|
||||
return singleLogoutService;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private String buildKibanaAssertionConsumerEndpoint(String kibanaRoot) {
|
||||
|
||||
if (kibanaRoot.endsWith("/")) {
|
||||
return kibanaRoot + "_opendistro/_security/saml/acs";
|
||||
} else {
|
||||
return kibanaRoot + "/_opendistro/_security/saml/acs";
|
||||
}
|
||||
}
|
||||
|
||||
static class SamlSettingsMap implements Map<String, Object> {
|
||||
|
||||
private static final String KEY_PREFIX = "onelogin.saml2.";
|
||||
|
||||
private Settings settings;
|
||||
|
||||
SamlSettingsMap(Settings settings) {
|
||||
this.settings = settings.getAsSettings("validator");
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return this.settings.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isEmpty() {
|
||||
return this.settings.isEmpty();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean containsKey(Object key) {
|
||||
return this.settings.hasValue(this.adaptKey(key));
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean containsValue(Object value) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object get(Object key) {
|
||||
return this.settings.get(this.adaptKey(key));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object put(String key, Object value) {
|
||||
throw new UnsupportedOperationException();
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object remove(Object key) {
|
||||
throw new UnsupportedOperationException();
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void putAll(Map<? extends String, ? extends Object> m) {
|
||||
throw new UnsupportedOperationException();
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<String> keySet() {
|
||||
return this.settings.keySet().stream().map((s) -> KEY_PREFIX + s).collect(Collectors.toSet());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Collection<Object> values() {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Set<Entry<String, Object>> entrySet() {
|
||||
Set<Entry<String, Object>> result = new HashSet<>();
|
||||
|
||||
for (String key : this.settings.keySet()) {
|
||||
result.add(new AbstractMap.SimpleEntry<String, Object>(KEY_PREFIX + key, this.settings.get(key)));
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private String adaptKey(Object keyObject) {
|
||||
if (keyObject == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
String key = String.valueOf(keyObject);
|
||||
|
||||
if (key.startsWith(KEY_PREFIX)) {
|
||||
return key.substring(KEY_PREFIX.length());
|
||||
} else {
|
||||
return key;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
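The SamlSettingsMap above exposes the authenticator's "validator.*" settings to the OneLogin SAML toolkit under the "onelogin.saml2." prefix it expects. A minimal sketch of the key translation, with hypothetical setting values:

// Illustrative only: adaptKey() strips the "onelogin.saml2." prefix before the
// lookup, and keySet()/entrySet() re-add it for every configured key.
Settings samlSettings = Settings.builder()
        .put("validator.strict", "true")
        .put("validator.sp.entityid", "es-saml")
        .build();

Map<String, Object> oneLoginProps = new SamlSettingsMap(samlSettings);
oneLoginProps.get("onelogin.saml2.strict");   // -> "true"
oneLoginProps.keySet();                       // -> [onelogin.saml2.strict, onelogin.saml2.sp.entityid]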
|
@ -0,0 +1,42 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.saml;
|
||||
|
||||
public class SamlConfigException extends Exception {
|
||||
|
||||
private static final long serialVersionUID = 6888715101647475455L;
|
||||
|
||||
public SamlConfigException() {
|
||||
super();
|
||||
}
|
||||
|
||||
public SamlConfigException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) {
|
||||
super(message, cause, enableSuppression, writableStackTrace);
|
||||
}
|
||||
|
||||
public SamlConfigException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
|
||||
public SamlConfigException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
public SamlConfigException(Throwable cause) {
|
||||
super(cause);
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,69 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.saml;
|
||||
|
||||
import java.io.File;
|
||||
import java.nio.file.Path;
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedActionException;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.opensaml.saml.metadata.resolver.impl.FilesystemMetadataResolver;
|
||||
|
||||
import net.shibboleth.utilities.java.support.resolver.ResolverException;
|
||||
import net.shibboleth.utilities.java.support.xml.BasicParserPool;
|
||||
|
||||
public class SamlFilesystemMetadataResolver extends FilesystemMetadataResolver {
|
||||
private static int componentIdCounter = 0;
|
||||
|
||||
SamlFilesystemMetadataResolver(Settings esSettings, Path configPath) throws Exception {
|
||||
super(getMetadataFile(esSettings, configPath));
|
||||
setId(SamlFilesystemMetadataResolver.class.getName() + "_" + (++componentIdCounter));
|
||||
setRequireValidMetadata(true);
|
||||
BasicParserPool basicParserPool = new BasicParserPool();
|
||||
basicParserPool.initialize();
|
||||
setParserPool(basicParserPool);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected byte[] fetchMetadata() throws ResolverException {
|
||||
try {
|
||||
return AccessController.doPrivileged(new PrivilegedExceptionAction<byte[]>() {
|
||||
@Override
|
||||
public byte[] run() throws ResolverException {
|
||||
return SamlFilesystemMetadataResolver.super.fetchMetadata();
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
|
||||
if (e.getCause() instanceof ResolverException) {
|
||||
throw (ResolverException) e.getCause();
|
||||
} else {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static File getMetadataFile(Settings settings, Path configPath) {
|
||||
|
||||
String originalPath = settings.get("idp.metadata_file", null);
|
||||
Environment env = new Environment(settings, configPath);
|
||||
|
||||
return env.configFile().resolve(originalPath).toAbsolutePath().toFile();
|
||||
}
|
||||
}
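Both metadata resolvers override fetchMetadata() with the same privileged-block pattern so the fetch can run under the Elasticsearch security manager. A generic sketch of that pattern (the helper name is illustrative, not part of the plugin):

// Sketch: run a metadata fetch inside a privileged block and unwrap the
// original ResolverException from the PrivilegedActionException wrapper.
static byte[] fetchPrivileged(PrivilegedExceptionAction<byte[]> fetch) throws ResolverException {
    try {
        return AccessController.doPrivileged(fetch);
    } catch (PrivilegedActionException e) {
        if (e.getCause() instanceof ResolverException) {
            throw (ResolverException) e.getCause();
        }
        throw new RuntimeException(e);
    }
}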
|
@ -0,0 +1,113 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.saml;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedActionException;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
|
||||
import org.apache.http.client.HttpClient;
|
||||
import org.apache.http.impl.client.HttpClientBuilder;
|
||||
import org.apache.http.impl.client.HttpClients;
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.opensaml.saml.metadata.resolver.impl.HTTPMetadataResolver;
|
||||
|
||||
import com.amazon.dlic.util.SettingsBasedSSLConfigurator;
|
||||
|
||||
import net.shibboleth.utilities.java.support.resolver.ResolverException;
|
||||
import net.shibboleth.utilities.java.support.xml.BasicParserPool;
|
||||
|
||||
public class SamlHTTPMetadataResolver extends HTTPMetadataResolver {
|
||||
private static int componentIdCounter = 0;
|
||||
|
||||
SamlHTTPMetadataResolver(Settings esSettings, Path configPath) throws Exception {
|
||||
super(createHttpClient(esSettings, configPath), esSettings.get("idp.metadata_url"));
|
||||
setId(HTTPSamlAuthenticator.class.getName() + "_" + (++componentIdCounter));
|
||||
setRequireValidMetadata(true);
|
||||
setFailFastInitialization(false);
|
||||
setMinRefreshDelay(esSettings.getAsLong("idp.min_refresh_delay", 60L * 1000L));
|
||||
setMaxRefreshDelay(esSettings.getAsLong("idp.max_refresh_delay", 14400000L));
|
||||
setRefreshDelayFactor(esSettings.getAsFloat("idp.refresh_delay_factor", 0.75f));
|
||||
BasicParserPool basicParserPool = new BasicParserPool();
|
||||
basicParserPool.initialize();
|
||||
setParserPool(basicParserPool);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected byte[] fetchMetadata() throws ResolverException {
|
||||
try {
|
||||
return AccessController.doPrivileged(new PrivilegedExceptionAction<byte[]>() {
|
||||
@Override
|
||||
public byte[] run() throws ResolverException {
|
||||
return SamlHTTPMetadataResolver.super.fetchMetadata();
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
|
||||
if (e.getCause() instanceof ResolverException) {
|
||||
throw (ResolverException) e.getCause();
|
||||
} else {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static SettingsBasedSSLConfigurator.SSLConfig getSSLConfig(Settings settings, Path configPath)
|
||||
throws Exception {
|
||||
return new SettingsBasedSSLConfigurator(settings, configPath, "idp").buildSSLConfig();
|
||||
}
|
||||
|
||||
private static HttpClient createHttpClient(Settings settings, Path configPath) throws Exception {
|
||||
try {
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
return AccessController.doPrivileged(new PrivilegedExceptionAction<HttpClient>() {
|
||||
@Override
|
||||
public HttpClient run() throws Exception {
|
||||
return createHttpClient0(settings, configPath);
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
if (e.getCause() instanceof Exception) {
|
||||
throw (Exception) e.getCause();
|
||||
} else {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static HttpClient createHttpClient0(Settings settings, Path configPath) throws Exception {
|
||||
|
||||
HttpClientBuilder builder = HttpClients.custom();
|
||||
|
||||
builder.useSystemProperties();
|
||||
|
||||
SettingsBasedSSLConfigurator.SSLConfig sslConfig = getSSLConfig(settings, configPath);
|
||||
|
||||
if (sslConfig != null) {
|
||||
builder.setSSLSocketFactory(sslConfig.toSSLConnectionSocketFactory());
|
||||
}
|
||||
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
}
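With the defaults above, the IdP metadata is re-fetched no sooner than once a minute and at least every four hours, with the refresh delay factor (0.75 by default) roughly applied to the metadata's remaining validity when scheduling the next refresh. A hypothetical settings snippet overriding those values (the URL is an example, not a real endpoint):

Settings idpSettings = Settings.builder()
        .put("idp.metadata_url", "https://idp.example.com/metadata.xml")  // example URL
        .put("idp.min_refresh_delay", 30_000L)       // 30 seconds
        .put("idp.max_refresh_delay", 3_600_000L)    // 1 hour
        .put("idp.refresh_delay_factor", 0.5f)
        .build();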
|
@ -0,0 +1,77 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.saml;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
public class SamlNameIdFormat {
|
||||
private static Map<String, SamlNameIdFormat> KNOWN_NAME_ID_FORMATS_BY_URI = new HashMap<>();
|
||||
private static Map<String, SamlNameIdFormat> KNOWN_NAME_ID_FORMATS_BY_SHORT_NAME = new HashMap<>();
|
||||
|
||||
static {
|
||||
add("urn:oasis:names:tc:SAML:1.1:nameid-format:unspecified", "u");
|
||||
add("urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress", "email");
|
||||
add("urn:oasis:names:tc:SAML:1.1:nameid-format:X509SubjectName", "sn");
|
||||
add("urn:oasis:names:tc:SAML:2.0:nameid-format:kerberos", "ker");
|
||||
add("urn:oasis:names:tc:SAML:2.0:nameid-format:entity", "ent");
|
||||
add("urn:oasis:names:tc:SAML:2.0:nameid-format:persistent", "p");
|
||||
add("urn:oasis:names:tc:SAML:2.0:nameid-format:transient", "t");
|
||||
}
|
||||
|
||||
private final String uri;
|
||||
private final String shortName;
|
||||
|
||||
SamlNameIdFormat(String uri, String shortName) {
|
||||
this.uri = uri;
|
||||
this.shortName = shortName;
|
||||
}
|
||||
|
||||
public String getUri() {
|
||||
return uri;
|
||||
}
|
||||
|
||||
public String getShortName() {
|
||||
return shortName;
|
||||
}
|
||||
|
||||
static SamlNameIdFormat getByUri(String uri) {
|
||||
SamlNameIdFormat samlNameIdFormat = KNOWN_NAME_ID_FORMATS_BY_URI.get(uri);
|
||||
|
||||
if (samlNameIdFormat == null) {
|
||||
samlNameIdFormat = new SamlNameIdFormat(uri, uri);
|
||||
}
|
||||
|
||||
return samlNameIdFormat;
|
||||
}
|
||||
|
||||
static SamlNameIdFormat getByShortName(String shortNameOrUri) {
|
||||
SamlNameIdFormat samlNameIdFormat = KNOWN_NAME_ID_FORMATS_BY_SHORT_NAME.get(shortNameOrUri);
|
||||
|
||||
if (samlNameIdFormat == null) {
|
||||
samlNameIdFormat = new SamlNameIdFormat(shortNameOrUri, shortNameOrUri);
|
||||
}
|
||||
|
||||
return samlNameIdFormat;
|
||||
}
|
||||
|
||||
private static void add(String uri, String shortName) {
|
||||
SamlNameIdFormat samlNameIdFormat = new SamlNameIdFormat(uri, shortName);
|
||||
KNOWN_NAME_ID_FORMATS_BY_URI.put(uri, samlNameIdFormat);
|
||||
KNOWN_NAME_ID_FORMATS_BY_SHORT_NAME.put(shortName, samlNameIdFormat);
|
||||
}
|
||||
|
||||
}
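The lookups above never return null: unknown values are wrapped as-is, with the URI doubling as the short name. For illustration:

// Known short name resolves to its registered URI.
SamlNameIdFormat email = SamlNameIdFormat.getByShortName("email");
// email.getUri() -> "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress"

// Unknown values pass through unchanged instead of failing.
SamlNameIdFormat custom = SamlNameIdFormat.getByUri("urn:example:custom-format");
// custom.getUri() and custom.getShortName() both return "urn:example:custom-format"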
|
79
src/main/java/com/amazon/dlic/auth/ldap/LdapUser.java
Executable file
@ -0,0 +1,79 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.ldap;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.ldaptive.LdapAttribute;
|
||||
import org.ldaptive.LdapEntry;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.support.WildcardMatcher;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.AuthCredentials;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.User;
|
||||
|
||||
public class LdapUser extends User {
|
||||
|
||||
private static final long serialVersionUID = 1L;
|
||||
private final transient LdapEntry userEntry;
|
||||
private final String originalUsername;
|
||||
|
||||
public LdapUser(final String name, String originalUsername, final LdapEntry userEntry,
|
||||
final AuthCredentials credentials, int customAttrMaxValueLen, List<String> whiteListedAttributes) {
|
||||
super(name, null, credentials);
|
||||
this.originalUsername = originalUsername;
|
||||
this.userEntry = userEntry;
|
||||
Map<String, String> attributes = getCustomAttributesMap();
|
||||
attributes.put("ldap.original.username", originalUsername);
|
||||
attributes.put("ldap.dn", userEntry.getDn());
|
||||
|
||||
if (customAttrMaxValueLen > 0) {
|
||||
for (LdapAttribute attr : userEntry.getAttributes()) {
|
||||
if (attr != null && !attr.isBinary() && !attr.getName().toLowerCase().contains("password")) {
|
||||
final String val = attr.getStringValue();
|
||||
// only consider attributes which are not binary and whose value is not
// longer than customAttrMaxValueLen characters
|
||||
if (val != null && val.length() > 0 && val.length() <= customAttrMaxValueLen) {
|
||||
if (whiteListedAttributes != null && !whiteListedAttributes.isEmpty()) {
|
||||
if (WildcardMatcher.matchAny(whiteListedAttributes, attr.getName())) {
|
||||
attributes.put("attr.ldap." + attr.getName(), val);
|
||||
}
|
||||
} else {
|
||||
attributes.put("attr.ldap." + attr.getName(), val);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* May return null because ldapEntry is transient
|
||||
*
|
||||
* @return ldapEntry or null if object was deserialized
|
||||
*/
|
||||
public LdapEntry getUserEntry() {
|
||||
return userEntry;
|
||||
}
|
||||
|
||||
public String getDn() {
|
||||
return userEntry.getDn();
|
||||
}
|
||||
|
||||
public String getOriginalUsername() {
|
||||
return originalUsername;
|
||||
}
|
||||
}
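Given the filtering in the constructor, a resulting user carries "ldap.original.username", "ldap.dn" and one "attr.ldap.<name>" entry per non-binary, non-password attribute within the length limit (and, if a whitelist is configured, matching it). A hedged sketch with a hypothetical directory entry; "credentials" stands for an AuthCredentials instance obtained elsewhere:

// Hypothetical entry: a short mail attribute plus a password attribute that
// the constructor above skips.
LdapEntry entry = new LdapEntry("cn=jdoe,ou=people,dc=example,dc=com",
        new LdapAttribute("mail", "jdoe@example.com"),
        new LdapAttribute("userPassword", "secret"));

LdapUser user = new LdapUser("cn=jdoe,ou=people,dc=example,dc=com", "jdoe",
        entry, credentials, 36, null);
// The user's custom attributes now include ldap.dn, ldap.original.username
// and attr.ldap.mail; userPassword is excluded.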
|
287
src/main/java/com/amazon/dlic/auth/ldap/backend/LDAPAuthenticationBackend.java
Executable file
@ -0,0 +1,287 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.ldap.backend;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Path;
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedActionException;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.apache.commons.lang3.tuple.Pair;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.ElasticsearchSecurityException;
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.ldaptive.BindRequest;
|
||||
import org.ldaptive.Connection;
|
||||
import org.ldaptive.Credential;
|
||||
import org.ldaptive.LdapEntry;
|
||||
import org.ldaptive.LdapException;
|
||||
import org.ldaptive.Response;
|
||||
import org.ldaptive.SearchScope;
|
||||
|
||||
import com.amazon.dlic.auth.ldap.LdapUser;
|
||||
import com.amazon.dlic.auth.ldap.util.ConfigConstants;
|
||||
import com.amazon.dlic.auth.ldap.util.LdapHelper;
|
||||
import com.amazon.dlic.auth.ldap.util.Utils;
|
||||
import com.amazon.opendistroforelasticsearch.security.auth.AuthenticationBackend;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.AuthCredentials;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.User;
|
||||
|
||||
public class LDAPAuthenticationBackend implements AuthenticationBackend {
|
||||
|
||||
static final String ZERO_PLACEHOLDER = "{0}";
|
||||
static final String DEFAULT_USERBASE = "";
|
||||
static final String DEFAULT_USERSEARCH_PATTERN = "(sAMAccountName={0})";
|
||||
|
||||
static {
|
||||
Utils.init();
|
||||
}
|
||||
|
||||
protected static final Logger log = LogManager.getLogger(LDAPAuthenticationBackend.class);
|
||||
|
||||
private final Settings settings;
|
||||
private final Path configPath;
|
||||
private final List<Map.Entry<String, Settings>> userBaseSettings;
|
||||
|
||||
public LDAPAuthenticationBackend(final Settings settings, final Path configPath) {
|
||||
this.settings = settings;
|
||||
this.configPath = configPath;
|
||||
this.userBaseSettings = getUserBaseSettings(settings);
|
||||
}
|
||||
|
||||
@Override
|
||||
public User authenticate(final AuthCredentials credentials) throws ElasticsearchSecurityException {
|
||||
|
||||
Connection ldapConnection = null;
|
||||
final String user = Utils.escapeStringRfc2254(credentials.getUsername());
|
||||
byte[] password = credentials.getPassword();
|
||||
|
||||
try {
|
||||
|
||||
ldapConnection = LDAPAuthorizationBackend.getConnection(settings, configPath);
|
||||
|
||||
LdapEntry entry = exists(user, ldapConnection, settings, userBaseSettings);
|
||||
|
||||
// fake a user that does not exist
// makes it harder to guess whether a user exists by looking at the
// authentication delay time
|
||||
if (entry == null && settings.getAsBoolean(ConfigConstants.LDAP_FAKE_LOGIN_ENABLED, false)) {
|
||||
String fakeLoginDn = settings.get(ConfigConstants.LDAP_FAKE_LOGIN_DN,
"CN=faketomakebindfail,DC=" + UUID.randomUUID().toString());
entry = new LdapEntry(fakeLoginDn);
|
||||
password = settings.get(ConfigConstants.LDAP_FAKE_LOGIN_PASSWORD, "fakeLoginPwd123")
|
||||
.getBytes(StandardCharsets.UTF_8);
|
||||
} else if (entry == null) {
|
||||
throw new ElasticsearchSecurityException("No user " + user + " found");
|
||||
}
|
||||
|
||||
final String dn = entry.getDn();
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("Try to authenticate dn {}", dn);
|
||||
}
|
||||
|
||||
final BindRequest br = new BindRequest(dn, new Credential(password));
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
final Connection _con = ldapConnection;
|
||||
|
||||
try {
|
||||
AccessController.doPrivileged(new PrivilegedExceptionAction<Response<Void>>() {
|
||||
@Override
|
||||
public Response<Void> run() throws LdapException {
|
||||
return _con.reopen(br);
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
throw e.getException();
|
||||
}
|
||||
|
||||
final String usernameAttribute = settings.get(ConfigConstants.LDAP_AUTHC_USERNAME_ATTRIBUTE, null);
|
||||
String username = dn;
|
||||
|
||||
if (usernameAttribute != null && entry.getAttribute(usernameAttribute) != null) {
|
||||
username = entry.getAttribute(usernameAttribute).getStringValue();
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Authenticated username {}", username);
|
||||
}
|
||||
|
||||
final int customAttrMaxValueLen = settings.getAsInt(ConfigConstants.LDAP_CUSTOM_ATTR_MAXVAL_LEN, 36);
|
||||
final List<String> whitelistedAttributes = settings.getAsList(ConfigConstants.LDAP_CUSTOM_ATTR_WHITELIST,
|
||||
null);
|
||||
|
||||
// by default, all LDAP attributes which are not binary and whose values are
// at most 36 characters long are included in the user object
// if the whitelist contains at least one value, attributes are additionally
// checked against the whitelist (which may contain wildcards and regexes)
|
||||
return new LdapUser(username, user, entry, credentials, customAttrMaxValueLen, whitelistedAttributes);
|
||||
|
||||
} catch (final Exception e) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Unable to authenticate user due to ", e);
|
||||
}
|
||||
throw new ElasticsearchSecurityException(e.toString(), e);
|
||||
} finally {
|
||||
Arrays.fill(password, (byte) '\0');
|
||||
password = null;
|
||||
Utils.unbindAndCloseSilently(ldapConnection);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getType() {
|
||||
return "ldap";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean exists(final User user) {
|
||||
Connection ldapConnection = null;
|
||||
String userName = user.getName();
|
||||
|
||||
if (user instanceof LdapUser) {
|
||||
userName = ((LdapUser) user).getUserEntry().getDn();
|
||||
}
|
||||
|
||||
try {
|
||||
ldapConnection = LDAPAuthorizationBackend.getConnection(settings, configPath);
|
||||
return exists(userName, ldapConnection, settings, userBaseSettings) != null;
|
||||
} catch (final Exception e) {
|
||||
log.warn("User {} does not exist due to " + e, userName);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("User does not exist due to ", e);
|
||||
}
|
||||
return false;
|
||||
} finally {
|
||||
Utils.unbindAndCloseSilently(ldapConnection);
|
||||
}
|
||||
}
|
||||
|
||||
static List<Map.Entry<String, Settings>> getUserBaseSettings(Settings settings) {
|
||||
Map<String, Settings> userBaseSettingsMap = new HashMap<>(
|
||||
settings.getGroups(ConfigConstants.LDAP_AUTHCZ_USERS));
|
||||
|
||||
if (!userBaseSettingsMap.isEmpty()) {
|
||||
if (settings.hasValue(ConfigConstants.LDAP_AUTHC_USERBASE)) {
|
||||
throw new RuntimeException(
|
||||
"Both old-style and new-style configuration defined for LDAP authentication backend: "
|
||||
+ settings);
|
||||
}
|
||||
|
||||
return Utils.getOrderedBaseSettings(userBaseSettingsMap);
|
||||
} else {
|
||||
Settings.Builder settingsBuilder = Settings.builder();
|
||||
settingsBuilder.put(ConfigConstants.LDAP_AUTHCZ_BASE,
|
||||
settings.get(ConfigConstants.LDAP_AUTHC_USERBASE, DEFAULT_USERBASE));
|
||||
settingsBuilder.put(ConfigConstants.LDAP_AUTHCZ_SEARCH,
|
||||
settings.get(ConfigConstants.LDAP_AUTHC_USERSEARCH, DEFAULT_USERSEARCH_PATTERN));
|
||||
|
||||
return Collections.singletonList(Pair.of("_legacyConfig", settingsBuilder.build()));
|
||||
}
|
||||
}
|
||||
|
||||
static LdapEntry exists(final String user, Connection ldapConnection, Settings settings,
|
||||
List<Map.Entry<String, Settings>> userBaseSettings) throws Exception {
|
||||
|
||||
if (settings.getAsBoolean(ConfigConstants.LDAP_FAKE_LOGIN_ENABLED, false)
|
||||
|| settings.getAsBoolean(ConfigConstants.LDAP_SEARCH_ALL_BASES, false)
|
||||
|| settings.hasValue(ConfigConstants.LDAP_AUTHC_USERBASE)) {
|
||||
return existsSearchingAllBases(user, ldapConnection, userBaseSettings);
|
||||
} else {
|
||||
return existsSearchingUntilFirstHit(user, ldapConnection, userBaseSettings);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static LdapEntry existsSearchingUntilFirstHit(final String user, Connection ldapConnection,
|
||||
List<Map.Entry<String, Settings>> userBaseSettings) throws Exception {
|
||||
final String username = Utils.escapeStringRfc2254(user);
|
||||
|
||||
for (Map.Entry<String, Settings> entry : userBaseSettings) {
|
||||
Settings baseSettings = entry.getValue();
|
||||
|
||||
List<LdapEntry> result = LdapHelper.search(ldapConnection,
|
||||
baseSettings.get(ConfigConstants.LDAP_AUTHCZ_BASE, DEFAULT_USERBASE),
|
||||
baseSettings.get(ConfigConstants.LDAP_AUTHCZ_SEARCH, DEFAULT_USERSEARCH_PATTERN)
|
||||
.replace(ZERO_PLACEHOLDER, username),
|
||||
SearchScope.SUBTREE);
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Results for LDAP search for " + user + " in base " + entry.getKey() + ":\n" + result);
|
||||
}
|
||||
|
||||
if (result != null && result.size() >= 1) {
|
||||
return result.get(0);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static LdapEntry existsSearchingAllBases(final String user, Connection ldapConnection,
|
||||
List<Map.Entry<String, Settings>> userBaseSettings) throws Exception {
|
||||
final String username = Utils.escapeStringRfc2254(user);
|
||||
Set<LdapEntry> result = new HashSet<>();
|
||||
|
||||
for (Map.Entry<String, Settings> entry : userBaseSettings) {
|
||||
Settings baseSettings = entry.getValue();
|
||||
|
||||
List<LdapEntry> foundEntries = LdapHelper.search(ldapConnection,
|
||||
baseSettings.get(ConfigConstants.LDAP_AUTHCZ_BASE, DEFAULT_USERBASE),
|
||||
baseSettings.get(ConfigConstants.LDAP_AUTHCZ_SEARCH, DEFAULT_USERSEARCH_PATTERN)
|
||||
.replace(ZERO_PLACEHOLDER, username),
|
||||
SearchScope.SUBTREE);
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Results for LDAP search for " + user + " in base " + entry.getKey() + ":\n" + result);
|
||||
}
|
||||
|
||||
if (foundEntries != null) {
|
||||
result.addAll(foundEntries);
|
||||
}
|
||||
}
|
||||
|
||||
if (result.isEmpty()) {
|
||||
log.debug("No user " + username + " found");
|
||||
return null;
|
||||
}
|
||||
|
||||
if (result.size() > 1) {
|
||||
log.debug("More than one user for '" + username + "' found");
|
||||
return null;
|
||||
}
|
||||
|
||||
return result.iterator().next();
|
||||
}
|
||||
|
||||
}
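getUserBaseSettings() accepts either the legacy flat user search configuration or the newer grouped style, but not both at once. A hedged sketch of the two variants using the ConfigConstants referenced above (group names and filters are examples only):

// Legacy style: a single search base and pattern.
Settings legacy = Settings.builder()
        .put(ConfigConstants.LDAP_AUTHC_USERBASE, "ou=people,dc=example,dc=com")
        .put(ConfigConstants.LDAP_AUTHC_USERSEARCH, "(uid={0})")
        .build();

// Grouped style: named entries under the users prefix, tried in order.
Settings grouped = Settings.builder()
        .put(ConfigConstants.LDAP_AUTHCZ_USERS + ".primary." + ConfigConstants.LDAP_AUTHCZ_BASE,
                "ou=people,dc=example,dc=com")
        .put(ConfigConstants.LDAP_AUTHCZ_USERS + ".primary." + ConfigConstants.LDAP_AUTHCZ_SEARCH,
                "(uid={0})")
        .build();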
|
956
src/main/java/com/amazon/dlic/auth/ldap/backend/LDAPAuthorizationBackend.java
Executable file
@ -0,0 +1,956 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.ldap.backend;
|
||||
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.security.AccessController;
|
||||
import java.security.KeyStore;
|
||||
import java.security.KeyStoreException;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.security.PrivateKey;
|
||||
import java.security.PrivilegedActionException;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
import java.security.cert.CertificateException;
|
||||
import java.security.cert.X509Certificate;
|
||||
import java.time.Duration;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import javax.naming.InvalidNameException;
|
||||
import javax.naming.ldap.LdapName;
|
||||
import javax.naming.ldap.Rdn;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.ElasticsearchSecurityException;
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.ldaptive.BindRequest;
|
||||
import org.ldaptive.Connection;
|
||||
import org.ldaptive.ConnectionConfig;
|
||||
import org.ldaptive.Credential;
|
||||
import org.ldaptive.DefaultConnectionFactory;
|
||||
import org.ldaptive.LdapAttribute;
|
||||
import org.ldaptive.LdapEntry;
|
||||
import org.ldaptive.LdapException;
|
||||
import org.ldaptive.Response;
|
||||
import org.ldaptive.SearchScope;
|
||||
import org.ldaptive.control.RequestControl;
|
||||
import org.ldaptive.provider.ProviderConnection;
|
||||
import org.ldaptive.sasl.ExternalConfig;
|
||||
import org.ldaptive.ssl.AllowAnyHostnameVerifier;
|
||||
import org.ldaptive.ssl.AllowAnyTrustManager;
|
||||
import org.ldaptive.ssl.CredentialConfig;
|
||||
import org.ldaptive.ssl.CredentialConfigFactory;
|
||||
import org.ldaptive.ssl.SslConfig;
|
||||
import org.ldaptive.ssl.ThreadLocalTLSSocketFactory;
|
||||
|
||||
import com.amazon.dlic.auth.ldap.LdapUser;
|
||||
import com.amazon.dlic.auth.ldap.util.ConfigConstants;
|
||||
import com.amazon.dlic.auth.ldap.util.LdapHelper;
|
||||
import com.amazon.dlic.auth.ldap.util.Utils;
|
||||
import com.amazon.opendistroforelasticsearch.security.auth.AuthorizationBackend;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.util.SSLConfigConstants;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.PemKeyReader;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.WildcardMatcher;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.AuthCredentials;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.User;
|
||||
import com.google.common.collect.HashMultimap;
|
||||
|
||||
import io.netty.util.internal.PlatformDependent;
|
||||
|
||||
public class LDAPAuthorizationBackend implements AuthorizationBackend {
|
||||
|
||||
private static final String COM_SUN_JNDI_LDAP_OBJECT_DISABLE_ENDPOINT_IDENTIFICATION = "com.sun.jndi.ldap.object.disableEndpointIdentification";
|
||||
private static final List<String> DEFAULT_TLS_PROTOCOLS = Arrays.asList("TLSv1.2", "TLSv1.1");
|
||||
static final String ONE_PLACEHOLDER = "{1}";
|
||||
static final String TWO_PLACEHOLDER = "{2}";
|
||||
static final String DEFAULT_ROLEBASE = "";
|
||||
static final String DEFAULT_ROLESEARCH = "(member={0})";
|
||||
static final String DEFAULT_ROLENAME = "name";
|
||||
static final String DEFAULT_USERROLENAME = "memberOf";
|
||||
|
||||
static {
|
||||
Utils.init();
|
||||
}
|
||||
|
||||
protected static final Logger log = LogManager.getLogger(LDAPAuthorizationBackend.class);
|
||||
private final Settings settings;
|
||||
private final Path configPath;
|
||||
private final List<Map.Entry<String, Settings>> roleBaseSettings;
|
||||
private final List<Map.Entry<String, Settings>> userBaseSettings;
|
||||
|
||||
public LDAPAuthorizationBackend(final Settings settings, final Path configPath) {
|
||||
this.settings = settings;
|
||||
this.configPath = configPath;
|
||||
this.roleBaseSettings = getRoleSearchSettings(settings);
|
||||
this.userBaseSettings = LDAPAuthenticationBackend.getUserBaseSettings(settings);
|
||||
}
|
||||
|
||||
public static Connection getConnection(final Settings settings, final Path configPath) throws Exception {
|
||||
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
try {
|
||||
return AccessController.doPrivileged(new PrivilegedExceptionAction<Connection>() {
|
||||
@Override
|
||||
public Connection run() throws Exception {
|
||||
boolean isJava9OrHigher = PlatformDependent.javaVersion() >= 9;
|
||||
ClassLoader originalClassloader = null;
|
||||
if (isJava9OrHigher) {
|
||||
originalClassloader = Thread.currentThread().getContextClassLoader();
|
||||
Thread.currentThread().setContextClassLoader(new Java9CL());
|
||||
}
|
||||
|
||||
return getConnection0(settings, configPath, originalClassloader, isJava9OrHigher);
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
throw e.getException();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static List<Map.Entry<String, Settings>> getRoleSearchSettings(Settings settings) {
|
||||
Map<String, Settings> groupedSettings = settings.getGroups(ConfigConstants.LDAP_AUTHZ_ROLES, true);
|
||||
|
||||
if (!groupedSettings.isEmpty()) {
|
||||
// New style settings
|
||||
return Utils.getOrderedBaseSettings(groupedSettings);
|
||||
} else {
|
||||
// Old style settings
|
||||
return convertOldStyleSettingsToNewStyle(settings);
|
||||
}
|
||||
}
|
||||
|
||||
private static List<Map.Entry<String, Settings>> convertOldStyleSettingsToNewStyle(Settings settings) {
|
||||
Map<String, Settings> result = new HashMap<>(1);
|
||||
|
||||
Settings.Builder settingsBuilder = Settings.builder();
|
||||
|
||||
settingsBuilder.put(ConfigConstants.LDAP_AUTHCZ_BASE,
|
||||
settings.get(ConfigConstants.LDAP_AUTHZ_ROLEBASE, DEFAULT_ROLEBASE));
|
||||
settingsBuilder.put(ConfigConstants.LDAP_AUTHCZ_SEARCH,
|
||||
settings.get(ConfigConstants.LDAP_AUTHZ_ROLESEARCH, DEFAULT_ROLESEARCH));
|
||||
|
||||
result.put("convertedOldStyleSettings", settingsBuilder.build());
|
||||
|
||||
return Collections.singletonList(result.entrySet().iterator().next());
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static Connection getConnection0(final Settings settings, final Path configPath, final ClassLoader cl,
|
||||
final boolean needRestore) throws KeyStoreException, NoSuchAlgorithmException, CertificateException,
|
||||
FileNotFoundException, IOException, LdapException {
|
||||
final boolean enableSSL = settings.getAsBoolean(ConfigConstants.LDAPS_ENABLE_SSL, false);
|
||||
|
||||
final List<String> ldapHosts = settings.getAsList(ConfigConstants.LDAP_HOSTS,
|
||||
Collections.singletonList("localhost"));
|
||||
|
||||
Connection connection = null;
|
||||
Exception lastException = null;
|
||||
|
||||
for (String ldapHost : ldapHosts) {
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("Connect to {}", ldapHost);
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
final String[] split = ldapHost.split(":");
|
||||
|
||||
int port;
|
||||
|
||||
if (split.length > 1) {
|
||||
port = Integer.parseInt(split[1]);
|
||||
} else {
|
||||
port = enableSSL ? 636 : 389;
|
||||
}
|
||||
|
||||
final ConnectionConfig config = new ConnectionConfig();
|
||||
config.setLdapUrl("ldap" + (enableSSL ? "s" : "") + "://" + split[0] + ":" + port);
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("Connect to {}", config.getLdapUrl());
|
||||
}
|
||||
|
||||
final Map<String, Object> props = configureSSL(config, settings, configPath);
|
||||
|
||||
DefaultConnectionFactory connFactory = new DefaultConnectionFactory(config);
|
||||
connFactory.getProvider().getProviderConfig().setProperties(props);
|
||||
connection = connFactory.getConnection();
|
||||
|
||||
final String bindDn = settings.get(ConfigConstants.LDAP_BIND_DN, null);
|
||||
final String password = settings.get(ConfigConstants.LDAP_PASSWORD, null);
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("bindDn {}, password {}", bindDn,
|
||||
password != null && password.length() > 0 ? "****" : "<not set>");
|
||||
}
|
||||
|
||||
if (bindDn != null && (password == null || password.length() == 0)) {
|
||||
log.error("No password given for bind_dn {}. Will try to authenticate anonymously to ldap", bindDn);
|
||||
}
|
||||
|
||||
final boolean enableClientAuth = settings.getAsBoolean(ConfigConstants.LDAPS_ENABLE_SSL_CLIENT_AUTH,
|
||||
ConfigConstants.LDAPS_ENABLE_SSL_CLIENT_AUTH_DEFAULT);
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
if (enableClientAuth && bindDn == null) {
|
||||
log.debug("Will perform External SASL bind because client cert authentication is enabled");
|
||||
} else if (bindDn == null) {
|
||||
log.debug("Will perform anonymous bind because no bind dn is given");
|
||||
} else if (enableClientAuth && bindDn != null) {
|
||||
log.debug(
|
||||
"Will perform simple bind with bind dn because to bind dn is given and overrides client cert authentication");
|
||||
} else if (!enableClientAuth && bindDn != null) {
|
||||
log.debug("Will perform simple bind with bind dn");
|
||||
}
|
||||
}
|
||||
|
||||
BindRequest br = enableClientAuth ? new BindRequest(new ExternalConfig()) : new BindRequest();
|
||||
|
||||
if (bindDn != null && password != null && password.length() > 0) {
|
||||
br = new BindRequest(bindDn, new Credential(password));
|
||||
}
|
||||
|
||||
connection.open(br);
|
||||
|
||||
if (connection != null && connection.isOpen()) {
|
||||
break;
|
||||
}
|
||||
} catch (final Exception e) {
|
||||
lastException = e;
|
||||
log.warn("Unable to connect to ldapserver {} due to {}. Try next.", ldapHost, e.toString());
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Unable to connect to ldapserver due to ", e);
|
||||
}
|
||||
Utils.unbindAndCloseSilently(connection);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (connection == null || !connection.isOpen()) {
|
||||
if (lastException == null) {
|
||||
throw new LdapException("Unable to connect to any of those ldap servers " + ldapHosts);
|
||||
} else {
|
||||
throw new LdapException(
|
||||
"Unable to connect to any of those ldap servers " + ldapHosts + " due to " + lastException,
|
||||
lastException);
|
||||
}
|
||||
}
|
||||
|
||||
final Connection delegate = connection;
|
||||
|
||||
return new Connection() {
|
||||
|
||||
@Override
|
||||
public Response<Void> reopen(BindRequest request) throws LdapException {
|
||||
return delegate.reopen(request);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Response<Void> reopen() throws LdapException {
|
||||
return delegate.reopen();
|
||||
}
|
||||
|
||||
@Override
|
||||
public Response<Void> open(BindRequest request) throws LdapException {
|
||||
return delegate.open(request);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Response<Void> open() throws LdapException {
|
||||
return delegate.open();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isOpen() {
|
||||
return delegate.isOpen();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ProviderConnection getProviderConnection() {
|
||||
return delegate.getProviderConnection();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ConnectionConfig getConnectionConfig() {
|
||||
return delegate.getConnectionConfig();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close(RequestControl[] controls) {
|
||||
try {
|
||||
delegate.close(controls);
|
||||
} finally {
|
||||
restoreClassLoader();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() {
|
||||
try {
|
||||
delegate.close();
|
||||
} finally {
|
||||
restoreClassLoader();
|
||||
}
|
||||
}
|
||||
|
||||
private void restoreClassLoader() {
|
||||
if (needRestore) {
|
||||
try {
|
||||
AccessController.doPrivileged(new PrivilegedExceptionAction<Void>() {
|
||||
@Override
|
||||
public Void run() throws Exception {
|
||||
Thread.currentThread().setContextClassLoader(cl);
|
||||
return null;
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
log.warn("Unable to restore classloader because of " + e.getException(), e.getException());
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static Map<String, Object> configureSSL(final ConnectionConfig config, final Settings settings,
|
||||
final Path configPath) throws Exception {
|
||||
|
||||
final Map<String, Object> props = new HashMap<String, Object>();
|
||||
final boolean enableSSL = settings.getAsBoolean(ConfigConstants.LDAPS_ENABLE_SSL, false);
|
||||
final boolean enableStartTLS = settings.getAsBoolean(ConfigConstants.LDAPS_ENABLE_START_TLS, false);
|
||||
|
||||
if (enableSSL || enableStartTLS) {
|
||||
|
||||
final boolean enableClientAuth = settings.getAsBoolean(ConfigConstants.LDAPS_ENABLE_SSL_CLIENT_AUTH,
|
||||
ConfigConstants.LDAPS_ENABLE_SSL_CLIENT_AUTH_DEFAULT);
|
||||
|
||||
final boolean trustAll = settings.getAsBoolean(ConfigConstants.LDAPS_TRUST_ALL, false);
|
||||
|
||||
final boolean verifyHostnames = !trustAll && settings.getAsBoolean(ConfigConstants.LDAPS_VERIFY_HOSTNAMES,
|
||||
ConfigConstants.LDAPS_VERIFY_HOSTNAMES_DEFAULT);
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("verifyHostname {}:", verifyHostnames);
|
||||
log.debug("trustall {}:", trustAll);
|
||||
}
|
||||
|
||||
if (enableStartTLS && !verifyHostnames) {
|
||||
props.put("jndi.starttls.allowAnyHostname", "true");
|
||||
}
|
||||
|
||||
final boolean pem = settings.get(ConfigConstants.LDAPS_PEMTRUSTEDCAS_FILEPATH, null) != null
|
||||
|| settings.get(ConfigConstants.LDAPS_PEMTRUSTEDCAS_CONTENT, null) != null;
|
||||
|
||||
final SslConfig sslConfig = new SslConfig();
|
||||
CredentialConfig cc;
|
||||
|
||||
if (pem) {
|
||||
X509Certificate[] trustCertificates = PemKeyReader.loadCertificatesFromStream(
|
||||
PemKeyReader.resolveStream(ConfigConstants.LDAPS_PEMTRUSTEDCAS_CONTENT, settings));
|
||||
|
||||
if (trustCertificates == null) {
|
||||
trustCertificates = PemKeyReader.loadCertificatesFromFile(PemKeyReader
|
||||
.resolve(ConfigConstants.LDAPS_PEMTRUSTEDCAS_FILEPATH, settings, configPath, !trustAll));
|
||||
}
|
||||
// for client authentication
|
||||
X509Certificate authenticationCertificate = PemKeyReader.loadCertificateFromStream(
|
||||
PemKeyReader.resolveStream(ConfigConstants.LDAPS_PEMCERT_CONTENT, settings));
|
||||
|
||||
if (authenticationCertificate == null) {
|
||||
authenticationCertificate = PemKeyReader.loadCertificateFromFile(PemKeyReader
|
||||
.resolve(ConfigConstants.LDAPS_PEMCERT_FILEPATH, settings, configPath, enableClientAuth));
|
||||
}
|
||||
|
||||
PrivateKey authenticationKey = PemKeyReader.loadKeyFromStream(
|
||||
settings.get(ConfigConstants.LDAPS_PEMKEY_PASSWORD),
|
||||
PemKeyReader.resolveStream(ConfigConstants.LDAPS_PEMKEY_CONTENT, settings));
|
||||
|
||||
if (authenticationKey == null) {
|
||||
authenticationKey = PemKeyReader
|
||||
.loadKeyFromFile(settings.get(ConfigConstants.LDAPS_PEMKEY_PASSWORD), PemKeyReader.resolve(
|
||||
ConfigConstants.LDAPS_PEMKEY_FILEPATH, settings, configPath, enableClientAuth));
|
||||
}
|
||||
|
||||
cc = CredentialConfigFactory.createX509CredentialConfig(trustCertificates, authenticationCertificate,
|
||||
authenticationKey);
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Use PEM to secure communication with LDAP server (client auth is {})",
|
||||
authenticationKey != null);
|
||||
}
|
||||
|
||||
} else {
|
||||
final KeyStore trustStore = PemKeyReader.loadKeyStore(
|
||||
PemKeyReader.resolve(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_TRUSTSTORE_FILEPATH, settings,
|
||||
configPath, !trustAll),
|
||||
settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_TRUSTSTORE_PASSWORD,
|
||||
SSLConfigConstants.DEFAULT_STORE_PASSWORD),
|
||||
settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_TRUSTSTORE_TYPE));
|
||||
|
||||
final List<String> trustStoreAliases = settings.getAsList(ConfigConstants.LDAPS_JKS_TRUST_ALIAS, null);
|
||||
|
||||
// for client authentication
|
||||
final KeyStore keyStore = PemKeyReader.loadKeyStore(
|
||||
PemKeyReader.resolve(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_KEYSTORE_FILEPATH, settings,
|
||||
configPath, enableClientAuth),
|
||||
settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_KEYSTORE_PASSWORD,
|
||||
SSLConfigConstants.DEFAULT_STORE_PASSWORD),
|
||||
settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_KEYSTORE_TYPE));
|
||||
final String keyStorePassword = settings.get(
|
||||
SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_KEYSTORE_PASSWORD,
|
||||
SSLConfigConstants.DEFAULT_STORE_PASSWORD);
|
||||
|
||||
final String keyStoreAlias = settings.get(ConfigConstants.LDAPS_JKS_CERT_ALIAS, null);
|
||||
final String[] keyStoreAliases = keyStoreAlias == null ? null : new String[] { keyStoreAlias };
|
||||
|
||||
if (enableClientAuth && keyStoreAliases == null) {
|
||||
throw new IllegalArgumentException(ConfigConstants.LDAPS_JKS_CERT_ALIAS + " not given");
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Use Trust-/Keystore to secure communication with LDAP server (client auth is {})",
|
||||
keyStore != null);
|
||||
log.debug("trustStoreAliases: {}, keyStoreAlias: {}", trustStoreAliases, keyStoreAlias);
|
||||
}
|
||||
|
||||
cc = CredentialConfigFactory.createKeyStoreCredentialConfig(trustStore,
|
||||
trustStoreAliases == null ? null : trustStoreAliases.toArray(new String[0]), keyStore,
|
||||
keyStorePassword, keyStoreAliases);
|
||||
|
||||
}
|
||||
|
||||
sslConfig.setCredentialConfig(cc);
|
||||
|
||||
if (trustAll) {
|
||||
sslConfig.setTrustManagers(new AllowAnyTrustManager());
|
||||
}
|
||||
|
||||
if (!verifyHostnames) {
|
||||
sslConfig.setHostnameVerifier(new AllowAnyHostnameVerifier());
|
||||
final String deiProp = System.getProperty(COM_SUN_JNDI_LDAP_OBJECT_DISABLE_ENDPOINT_IDENTIFICATION);
|
||||
|
||||
if (deiProp == null || !Boolean.parseBoolean(deiProp)) {
|
||||
log.warn("In order to disable host name verification for LDAP connections (verify_hostnames: true), "
|
||||
+ "you also need to set set the system property "+COM_SUN_JNDI_LDAP_OBJECT_DISABLE_ENDPOINT_IDENTIFICATION+" to true when starting the JVM running ES. "
|
||||
+ "This applies for all Java versions released since July 2018.");
|
||||
// See:
|
||||
// https://www.oracle.com/technetwork/java/javase/8u181-relnotes-4479407.html
|
||||
// https://www.oracle.com/technetwork/java/javase/10-0-2-relnotes-4477557.html
|
||||
// https://www.oracle.com/technetwork/java/javase/11-0-1-relnotes-5032023.html
|
||||
}
|
||||
|
||||
System.setProperty(COM_SUN_JNDI_LDAP_OBJECT_DISABLE_ENDPOINT_IDENTIFICATION, "true");
|
||||
|
||||
}
|
||||
|
||||
final List<String> enabledCipherSuites = settings.getAsList(ConfigConstants.LDAPS_ENABLED_SSL_CIPHERS,
|
||||
Collections.emptyList());
|
||||
final List<String> enabledProtocols = settings.getAsList(ConfigConstants.LDAPS_ENABLED_SSL_PROTOCOLS,
|
||||
DEFAULT_TLS_PROTOCOLS);
|
||||
|
||||
if (!enabledCipherSuites.isEmpty()) {
|
||||
sslConfig.setEnabledCipherSuites(enabledCipherSuites.toArray(new String[0]));
|
||||
log.debug("enabled ssl cipher suites for ldaps {}", enabledCipherSuites);
|
||||
}
|
||||
|
||||
log.debug("enabled ssl/tls protocols for ldaps {}", enabledProtocols);
|
||||
sslConfig.setEnabledProtocols(enabledProtocols.toArray(new String[0]));
|
||||
config.setSslConfig(sslConfig);
|
||||
}
|
||||
|
||||
config.setUseSSL(enableSSL);
|
||||
config.setUseStartTLS(enableStartTLS);
|
||||
|
||||
final long connectTimeout = settings.getAsLong(ConfigConstants.LDAP_CONNECT_TIMEOUT, 5000L); // 0L means TCP
|
||||
// default timeout
|
||||
final long responseTimeout = settings.getAsLong(ConfigConstants.LDAP_RESPONSE_TIMEOUT, 0L); // 0L means wait
|
||||
// infinitely
|
||||
|
||||
config.setConnectTimeout(Duration.ofMillis(connectTimeout < 0L ? 0L : connectTimeout)); // 5 sec by default
|
||||
config.setResponseTimeout(Duration.ofMillis(responseTimeout < 0L ? 0L : responseTimeout));
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Connect timeout: " + config.getConnectTimeout() + "/ResponseTimeout: "
|
||||
+ config.getResponseTimeout());
|
||||
}
|
||||
return props;
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void fillRoles(final User user, final AuthCredentials optionalAuthCreds)
|
||||
throws ElasticsearchSecurityException {
|
||||
|
||||
if (user == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
String authenticatedUser;
|
||||
String originalUserName;
|
||||
LdapEntry entry = null;
|
||||
String dn = null;
|
||||
|
||||
if (user instanceof LdapUser) {
|
||||
entry = ((LdapUser) user).getUserEntry();
|
||||
authenticatedUser = entry.getDn();
|
||||
originalUserName = ((LdapUser) user).getOriginalUsername();
|
||||
} else {
|
||||
authenticatedUser = Utils.escapeStringRfc2254(user.getName());
|
||||
originalUserName = user.getName();
|
||||
}
|
||||
|
||||
final boolean rolesearchEnabled = settings.getAsBoolean(ConfigConstants.LDAP_AUTHZ_ROLESEARCH_ENABLED, true);
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Try to get roles for {}", authenticatedUser);
|
||||
}
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("user class: {}", user.getClass());
|
||||
log.trace("authenticatedUser: {}", authenticatedUser);
|
||||
log.trace("originalUserName: {}", originalUserName);
|
||||
log.trace("entry: {}", String.valueOf(entry));
|
||||
log.trace("dn: {}", dn);
|
||||
}
|
||||
|
||||
final List<String> skipUsers = settings.getAsList(ConfigConstants.LDAP_AUTHZ_SKIP_USERS,
|
||||
Collections.emptyList());
|
||||
if (!skipUsers.isEmpty() && (WildcardMatcher.matchAny(skipUsers, originalUserName)
|
||||
|| WildcardMatcher.matchAny(skipUsers, authenticatedUser))) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Skipped search roles of user {}/{}", authenticatedUser, originalUserName);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
Connection connection = null;
|
||||
|
||||
try {
|
||||
|
||||
if (entry == null || dn == null) {
|
||||
|
||||
connection = getConnection(settings, configPath);
|
||||
|
||||
if (isValidDn(authenticatedUser)) {
|
||||
// assume dn
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("{} is a valid DN", authenticatedUser);
|
||||
}
|
||||
|
||||
entry = LdapHelper.lookup(connection, authenticatedUser);
|
||||
|
||||
if (entry == null) {
|
||||
throw new ElasticsearchSecurityException("No user '" + authenticatedUser + "' found");
|
||||
}
|
||||
|
||||
} else {
|
||||
entry = LDAPAuthenticationBackend.exists(user.getName(), connection, settings, userBaseSettings);
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("{} is not a valid DN and was resolved to {}", authenticatedUser, entry);
|
||||
}
|
||||
|
||||
if (entry == null || entry.getDn() == null) {
|
||||
throw new ElasticsearchSecurityException("No user " + authenticatedUser + " found");
|
||||
}
|
||||
}
|
||||
|
||||
dn = entry.getDn();
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("User found with DN {}", dn);
|
||||
}
|
||||
}
|
||||
|
||||
final Set<LdapName> ldapRoles = new HashSet<>(150);
|
||||
final Set<String> nonLdapRoles = new HashSet<>(150);
|
||||
final HashMultimap<LdapName, Map.Entry<String, Settings>> resultRoleSearchBaseKeys = HashMultimap.create();
|
||||
|
||||
// Roles as an attribute of the user entry
|
||||
// default is userrolename: memberOf
|
||||
final String userRoleNames = settings.get(ConfigConstants.LDAP_AUTHZ_USERROLENAME, DEFAULT_USERROLENAME);
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("raw userRoleName(s): {}", userRoleNames);
|
||||
}
|
||||
|
||||
// more than one role name is supported; multiple names must be separated by commas
|
||||
for (String userRoleName : userRoleNames.split(",")) {
|
||||
final String roleName = userRoleName.trim();
|
||||
if (entry.getAttribute(roleName) != null) {
|
||||
final Collection<String> userRoles = entry.getAttribute(roleName).getStringValues();
|
||||
for (final String possibleRoleDN : userRoles) {
|
||||
if (isValidDn(possibleRoleDN)) {
|
||||
LdapName ldapName = new LdapName(possibleRoleDN);
|
||||
ldapRoles.add(ldapName);
|
||||
resultRoleSearchBaseKeys.putAll(ldapName, this.roleBaseSettings);
|
||||
} else {
|
||||
nonLdapRoles.add(possibleRoleDN);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("User attr. ldap roles count: {}", ldapRoles.size());
|
||||
log.trace("User attr. ldap roles {}", ldapRoles);
|
||||
log.trace("User attr. non-ldap roles count: {}", nonLdapRoles.size());
|
||||
log.trace("User attr. non-ldap roles {}", nonLdapRoles);
|
||||
|
||||
}
|
||||
|
||||
// The attribute in a role entry containing the name of that role, Default is
|
||||
// "name".
|
||||
// Can also be "dn" to use the full DN as rolename.
|
||||
// rolename: name
|
||||
final String roleName = settings.get(ConfigConstants.LDAP_AUTHZ_ROLENAME, DEFAULT_ROLENAME);
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("roleName: {}", roleName);
|
||||
}
|
||||
|
||||
// Specify the name of the attribute whose value should be substituted for {2}
// in the role search. The value is taken from the authenticated user's
// directory entry.
// userroleattribute: null
|
||||
final String userRoleAttributeName = settings.get(ConfigConstants.LDAP_AUTHZ_USERROLEATTRIBUTE, null);
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("userRoleAttribute: {}", userRoleAttributeName);
|
||||
log.trace("rolesearch: {}", settings.get(ConfigConstants.LDAP_AUTHZ_ROLESEARCH, DEFAULT_ROLESEARCH));
|
||||
}
|
||||
|
||||
String userRoleAttributeValue = null;
|
||||
final LdapAttribute userRoleAttribute = entry.getAttribute(userRoleAttributeName);
|
||||
|
||||
if (userRoleAttribute != null) {
|
||||
userRoleAttributeValue = userRoleAttribute.getStringValue();
|
||||
}
|
||||
|
||||
if (rolesearchEnabled) {
|
||||
String escapedDn = Utils.escapeStringRfc2254(dn);
|
||||
|
||||
for (Map.Entry<String, Settings> roleSearchSettingsEntry : roleBaseSettings) {
|
||||
Settings roleSearchSettings = roleSearchSettingsEntry.getValue();
|
||||
|
||||
List<LdapEntry> rolesResult = LdapHelper.search(connection,
|
||||
roleSearchSettings.get(ConfigConstants.LDAP_AUTHCZ_BASE, DEFAULT_ROLEBASE),
|
||||
roleSearchSettings.get(ConfigConstants.LDAP_AUTHCZ_SEARCH, DEFAULT_ROLESEARCH)
|
||||
.replace(LDAPAuthenticationBackend.ZERO_PLACEHOLDER, escapedDn)
|
||||
.replace(ONE_PLACEHOLDER, originalUserName).replace(TWO_PLACEHOLDER,
|
||||
userRoleAttributeValue == null ? TWO_PLACEHOLDER : userRoleAttributeValue),
|
||||
SearchScope.SUBTREE);
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("Results for LDAP group search for " + escapedDn + " in base "
|
||||
+ roleSearchSettingsEntry.getKey() + ":\n" + rolesResult);
|
||||
}
|
||||
|
||||
if (rolesResult != null && !rolesResult.isEmpty()) {
|
||||
for (final Iterator<LdapEntry> iterator = rolesResult.iterator(); iterator.hasNext();) {
|
||||
LdapEntry searchResultEntry = iterator.next();
|
||||
LdapName ldapName = new LdapName(searchResultEntry.getDn());
|
||||
ldapRoles.add(ldapName);
|
||||
resultRoleSearchBaseKeys.put(ldapName, roleSearchSettingsEntry);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("roles count total {}", ldapRoles.size());
|
||||
}
|
||||
|
||||
// nested roles; this only makes sense for DN-style role names
|
||||
if (settings.getAsBoolean(ConfigConstants.LDAP_AUTHZ_RESOLVE_NESTED_ROLES, false)) {
|
||||
|
||||
final List<String> nestedRoleFilter = settings.getAsList(ConfigConstants.LDAP_AUTHZ_NESTEDROLEFILTER,
|
||||
Collections.emptyList());
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("Evaluate nested roles");
|
||||
}
|
||||
|
||||
final Set<LdapName> nestedReturn = new HashSet<>(ldapRoles);
|
||||
|
||||
for (final LdapName roleLdapName : ldapRoles) {
|
||||
Set<Map.Entry<String, Settings>> nameRoleSearchBaseKeys = resultRoleSearchBaseKeys
|
||||
.get(roleLdapName);
|
||||
|
||||
if (nameRoleSearchBaseKeys == null) {
|
||||
log.error("Could not find roleSearchBaseKeys for " + roleLdapName + "; existing: "
|
||||
+ resultRoleSearchBaseKeys);
|
||||
continue;
|
||||
}
|
||||
|
||||
final Set<LdapName> nestedRoles = resolveNestedRoles(roleLdapName, connection, userRoleNames, 0,
|
||||
rolesearchEnabled, nameRoleSearchBaseKeys, nestedRoleFilter);
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("{} nested roles for {}", nestedRoles.size(), roleLdapName);
|
||||
}
|
||||
|
||||
nestedReturn.addAll(nestedRoles);
|
||||
}
|
||||
|
||||
for (final LdapName roleLdapName : nestedReturn) {
|
||||
final String role = getRoleFromAttribute(roleLdapName, roleName);
|
||||
|
||||
if (!Strings.isNullOrEmpty(role)) {
|
||||
user.addRole(role);
|
||||
} else {
|
||||
log.warn("No or empty attribute '{}' for entry {}", roleName, roleLdapName);
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
// DN roles, extract rolename according to config
|
||||
for (final LdapName roleLdapName : ldapRoles) {
|
||||
final String role = getRoleFromAttribute(roleLdapName, roleName);
|
||||
|
||||
if (!Strings.isNullOrEmpty(role)) {
|
||||
user.addRole(role);
|
||||
} else {
|
||||
log.warn("No or empty attribute '{}' for entry {}", roleName, roleLdapName);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// add all non-LDAP roles from user attributes to the final set of backend roles
|
||||
for (String nonLdapRoleName : nonLdapRoles) {
|
||||
user.addRole(nonLdapRoleName);
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Roles for {} -> {}", user.getName(), user.getRoles());
|
||||
}
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("returned user: {}", user);
|
||||
}
|
||||
|
||||
} catch (final Exception e) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Unable to fill user roles due to ", e);
|
||||
}
|
||||
throw new ElasticsearchSecurityException(e.toString(), e);
|
||||
} finally {
|
||||
Utils.unbindAndCloseSilently(connection);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
protected Set<LdapName> resolveNestedRoles(final LdapName roleDn, final Connection ldapConnection,
|
||||
String userRoleName, int depth, final boolean rolesearchEnabled,
|
||||
Set<Map.Entry<String, Settings>> roleSearchBaseSettingsSet, final List<String> roleFilter)
|
||||
throws ElasticsearchSecurityException, LdapException {
|
||||
|
||||
if (!roleFilter.isEmpty() && WildcardMatcher.matchAny(roleFilter, roleDn.toString())) {
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("Filter nested role {}", roleDn);
|
||||
}
|
||||
|
||||
return Collections.emptySet();
|
||||
}
|
||||
|
||||
depth++;
|
||||
|
||||
final Set<LdapName> result = new HashSet<>(20);
|
||||
final HashMultimap<LdapName, Map.Entry<String, Settings>> resultRoleSearchBaseKeys = HashMultimap.create();
|
||||
|
||||
final LdapEntry e0 = LdapHelper.lookup(ldapConnection, roleDn.toString());
|
||||
|
||||
if (e0.getAttribute(userRoleName) != null) {
|
||||
final Collection<String> userRoles = e0.getAttribute(userRoleName).getStringValues();
|
||||
|
||||
for (final String possibleRoleDN : userRoles) {
|
||||
if (isValidDn(possibleRoleDN)) {
|
||||
try {
|
||||
LdapName ldapName = new LdapName(possibleRoleDN);
|
||||
result.add(ldapName);
|
||||
resultRoleSearchBaseKeys.putAll(ldapName, this.roleBaseSettings);
|
||||
} catch (InvalidNameException e) {
|
||||
// ignore
|
||||
}
|
||||
} else {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Cannot add {} as a role because its not a valid dn", possibleRoleDN);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("result nested attr count for depth {} : {}", depth, result.size());
|
||||
}
|
||||
|
||||
if (rolesearchEnabled) {
|
||||
String escapedDn = Utils.escapeStringRfc2254(roleDn.toString());
|
||||
|
||||
for (Map.Entry<String, Settings> roleSearchBaseSettingsEntry : Utils
|
||||
.getOrderedBaseSettings(roleSearchBaseSettingsSet)) {
|
||||
Settings roleSearchSettings = roleSearchBaseSettingsEntry.getValue();
|
||||
|
||||
List<LdapEntry> foundEntries = LdapHelper.search(ldapConnection,
|
||||
roleSearchSettings.get(ConfigConstants.LDAP_AUTHCZ_BASE, DEFAULT_ROLEBASE),
|
||||
roleSearchSettings.get(ConfigConstants.LDAP_AUTHCZ_SEARCH, DEFAULT_ROLESEARCH)
|
||||
.replace(LDAPAuthenticationBackend.ZERO_PLACEHOLDER, escapedDn)
|
||||
.replace(ONE_PLACEHOLDER, escapedDn),
|
||||
SearchScope.SUBTREE);
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("Results for LDAP group search for " + escapedDn + " in base "
|
||||
+ roleSearchBaseSettingsEntry.getKey() + ":\n" + foundEntries);
|
||||
}
|
||||
|
||||
if (foundEntries != null) {
|
||||
for (final LdapEntry entry : foundEntries) {
|
||||
try {
|
||||
final LdapName dn = new LdapName(entry.getDn());
|
||||
result.add(dn);
|
||||
resultRoleSearchBaseKeys.put(dn, roleSearchBaseSettingsEntry);
|
||||
} catch (final InvalidNameException e) {
|
||||
throw new LdapException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
int maxDepth = ConfigConstants.LDAP_AUTHZ_MAX_NESTED_DEPTH_DEFAULT;
|
||||
try {
|
||||
maxDepth = settings.getAsInt(ConfigConstants.LDAP_AUTHZ_MAX_NESTED_DEPTH,
|
||||
ConfigConstants.LDAP_AUTHZ_MAX_NESTED_DEPTH_DEFAULT);
|
||||
} catch (Exception e) {
|
||||
log.error(ConfigConstants.LDAP_AUTHZ_MAX_NESTED_DEPTH + " is not parseable: " + e, e);
|
||||
}
|
||||
|
||||
if (depth < maxDepth) {
|
||||
for (final LdapName nm : new HashSet<LdapName>(result)) {
|
||||
Set<Map.Entry<String, Settings>> nameRoleSearchBaseKeys = resultRoleSearchBaseKeys.get(nm);
|
||||
|
||||
if (nameRoleSearchBaseKeys == null) {
|
||||
log.error(
|
||||
"Could not find roleSearchBaseKeys for " + nm + "; existing: " + resultRoleSearchBaseKeys);
|
||||
continue;
|
||||
}
|
||||
|
||||
final Set<LdapName> in = resolveNestedRoles(nm, ldapConnection, userRoleName, depth, rolesearchEnabled,
|
||||
nameRoleSearchBaseKeys, roleFilter);
|
||||
result.addAll(in);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getType() {
|
||||
return "ldap";
|
||||
}
|
||||
|
||||
private boolean isValidDn(final String dn) {
|
||||
|
||||
if (Strings.isNullOrEmpty(dn)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
new LdapName(dn);
|
||||
} catch (final Exception e) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private String getRoleFromAttribute(final LdapName ldapName, final String role) {
|
||||
|
||||
if (ldapName == null || Strings.isNullOrEmpty(role)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if ("dn".equalsIgnoreCase(role)) {
|
||||
return ldapName.toString();
|
||||
}
|
||||
|
||||
List<Rdn> rdns = new ArrayList<>(ldapName.getRdns().size());
|
||||
rdns.addAll(ldapName.getRdns());
|
||||
|
||||
Collections.reverse(rdns);
|
||||
|
||||
for (Rdn rdn : rdns) {
|
||||
if (role.equalsIgnoreCase(rdn.getType())) {
|
||||
|
||||
if (rdn.getValue() == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return String.valueOf(rdn.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
private final static Class clazz = ThreadLocalTLSSocketFactory.class;
|
||||
|
||||
private final static class Java9CL extends ClassLoader {
|
||||
|
||||
public Java9CL() {
|
||||
super();
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public Java9CL(ClassLoader parent) {
|
||||
super(parent);
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
@Override
|
||||
public Class loadClass(String name) throws ClassNotFoundException {
|
||||
|
||||
if (!name.equalsIgnoreCase("org.ldaptive.ssl.ThreadLocalTLSSocketFactory")) {
|
||||
return super.loadClass(name);
|
||||
}
|
||||
|
||||
return clazz;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
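For reference, the role search above expands three placeholders in the configured filter before it is sent to LDAP: {0} is replaced by the RFC 2254-escaped DN of the user, {1} by the original login name, and {2} by the value of the configured user role attribute (or left as-is when that value is absent). A minimal sketch of the same substitution, using a hypothetical filter template and made-up sample values:

    // Illustrative sketch only; the filter template and sample values are hypothetical.
    String filterTemplate = "(&(objectClass=groupOfNames)(member={0}))";
    String escapedDn = "cn=jdoe,ou=people,dc=example,dc=com"; // already RFC 2254-escaped
    String originalUserName = "jdoe";
    String userRoleAttributeValue = null;

    String filter = filterTemplate
            .replace("{0}", escapedDn)
            .replace("{1}", originalUserName)
            // when no user role attribute value exists, {2} is left in place, as in the backend code
            .replace("{2}", userRoleAttributeValue == null ? "{2}" : userRoleAttributeValue);
    // filter is now "(&(objectClass=groupOfNames)(member=cn=jdoe,ou=people,dc=example,dc=com))"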
93
src/main/java/com/amazon/dlic/auth/ldap/util/ConfigConstants.java
Executable file
@ -0,0 +1,93 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.ldap.util;
|
||||
|
||||
public final class ConfigConstants {
|
||||
|
||||
public static final String LDAP_AUTHC_USERBASE = "userbase";
|
||||
public static final String LDAP_AUTHC_USERNAME_ATTRIBUTE = "username_attribute";
|
||||
public static final String LDAP_AUTHC_USERSEARCH = "usersearch";
|
||||
|
||||
public static final String LDAP_AUTHCZ_USERS = "users";
|
||||
public static final String LDAP_AUTHZ_ROLES = "roles";
|
||||
public static final String LDAP_AUTHCZ_BASE = "base";
|
||||
public static final String LDAP_AUTHCZ_SEARCH = "search";
|
||||
|
||||
|
||||
public static final String LDAP_AUTHZ_RESOLVE_NESTED_ROLES = "resolve_nested_roles";
|
||||
public static final String LDAP_AUTHZ_ROLEBASE = "rolebase";
|
||||
public static final String LDAP_AUTHZ_ROLENAME = "rolename";
|
||||
public static final String LDAP_AUTHZ_ROLESEARCH = "rolesearch";
|
||||
public static final String LDAP_AUTHZ_USERROLEATTRIBUTE = "userroleattribute";
|
||||
public static final String LDAP_AUTHZ_USERROLENAME = "userrolename";
|
||||
public static final String LDAP_AUTHZ_SKIP_USERS = "skip_users";
|
||||
public static final String LDAP_AUTHZ_ROLESEARCH_ENABLED = "rolesearch_enabled";
|
||||
public static final String LDAP_AUTHZ_NESTEDROLEFILTER = "nested_role_filter";
|
||||
public static final String LDAP_AUTHZ_MAX_NESTED_DEPTH = "max_nested_depth";
|
||||
public static final int LDAP_AUTHZ_MAX_NESTED_DEPTH_DEFAULT = 30;
|
||||
|
||||
public static final String LDAP_HOSTS = "hosts";
|
||||
public static final String LDAP_BIND_DN = "bind_dn";
|
||||
public static final String LDAP_PASSWORD = "password";
|
||||
public static final String LDAP_FAKE_LOGIN_ENABLED = "fakelogin_enabled";
|
||||
public static final String LDAP_SEARCH_ALL_BASES = "search_all_bases";
|
||||
|
||||
public static final String LDAP_FAKE_LOGIN_DN = "fakelogin_dn";
|
||||
public static final String LDAP_FAKE_LOGIN_PASSWORD = "fakelogin_password";
|
||||
|
||||
public static final String LDAP_CONNECT_TIMEOUT = "connect_timeout"; // com.sun.jndi.ldap.connect.timeout
|
||||
public static final String LDAP_RESPONSE_TIMEOUT = "response_timeout"; // com.sun.jndi.ldap.read.timeout
|
||||
|
||||
// ssl
|
||||
public static final String LDAPS_VERIFY_HOSTNAMES = "verify_hostnames";
|
||||
public static final String LDAPS_TRUST_ALL = "trust_all";
|
||||
public static final boolean LDAPS_VERIFY_HOSTNAMES_DEFAULT = true;
|
||||
public static final String LDAPS_ENABLE_SSL = "enable_ssl";
|
||||
public static final String LDAPS_ENABLE_START_TLS = "enable_start_tls";
|
||||
public static final String LDAPS_ENABLE_SSL_CLIENT_AUTH = "enable_ssl_client_auth";
|
||||
public static final boolean LDAPS_ENABLE_SSL_CLIENT_AUTH_DEFAULT = false;
|
||||
|
||||
public static final String LDAPS_JKS_CERT_ALIAS = "cert_alias";
|
||||
public static final String LDAPS_JKS_TRUST_ALIAS = "ca_alias";
|
||||
|
||||
public static final String LDAPS_PEMKEY_FILEPATH = "pemkey_filepath";
|
||||
public static final String LDAPS_PEMKEY_CONTENT = "pemkey_content";
|
||||
public static final String LDAPS_PEMKEY_PASSWORD = "pemkey_password";
|
||||
public static final String LDAPS_PEMCERT_FILEPATH = "pemcert_filepath";
|
||||
public static final String LDAPS_PEMCERT_CONTENT = "pemcert_content";
|
||||
public static final String LDAPS_PEMTRUSTEDCAS_FILEPATH = "pemtrustedcas_filepath";
|
||||
public static final String LDAPS_PEMTRUSTEDCAS_CONTENT = "pemtrustedcas_content";
|
||||
|
||||
public static final String LDAPS_ENABLED_SSL_CIPHERS = "enabled_ssl_ciphers";
|
||||
public static final String LDAPS_ENABLED_SSL_PROTOCOLS = "enabled_ssl_protocols";
|
||||
|
||||
// custom attributes
|
||||
public static final String LDAP_CUSTOM_ATTR_MAXVAL_LEN = "custom_attr_maxval_len";
|
||||
public static final String LDAP_CUSTOM_ATTR_WHITELIST = "custom_attr_whitelist";
|
||||
|
||||
public static final String LDAP_CONNECTION_STRATEGY = "connection_strategy";
|
||||
|
||||
public static final String LDAP_POOL_ENABLED = "pool.enabled";
|
||||
public static final String LDAP_POOL_MIN_SIZE = "pool.min_size";
|
||||
public static final String LDAP_POOL_MAX_SIZE = "pool.max_size";
|
||||
|
||||
public static final String LDAP_POOL_TYPE = "pool.type";
|
||||
|
||||
private ConfigConstants() {
|
||||
|
||||
}
|
||||
|
||||
}
|
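The constants above are the raw keys that the LDAP authentication and authorization backends read from their per-domain Settings block. A minimal sketch of how a backend could read a few of them, assuming a hand-built Settings instance with made-up values:

    // Illustrative sketch only; the keys come from ConfigConstants, the values are hypothetical.
    Settings ldapSettings = Settings.builder()
            .putList(ConfigConstants.LDAP_HOSTS, "ldap.example.com:389")
            .put(ConfigConstants.LDAP_AUTHC_USERBASE, "ou=people,dc=example,dc=com")
            .put(ConfigConstants.LDAP_AUTHZ_ROLEBASE, "ou=groups,dc=example,dc=com")
            .put(ConfigConstants.LDAP_AUTHZ_RESOLVE_NESTED_ROLES, true)
            .build();

    boolean resolveNested = ldapSettings.getAsBoolean(ConfigConstants.LDAP_AUTHZ_RESOLVE_NESTED_ROLES, false);
    int maxDepth = ldapSettings.getAsInt(ConfigConstants.LDAP_AUTHZ_MAX_NESTED_DEPTH,
            ConfigConstants.LDAP_AUTHZ_MAX_NESTED_DEPTH_DEFAULT);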
88
src/main/java/com/amazon/dlic/auth/ldap/util/LdapHelper.java
Normal file
@ -0,0 +1,88 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.ldap.util;
|
||||
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedActionException;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.ldaptive.Connection;
|
||||
import org.ldaptive.DerefAliases;
|
||||
import org.ldaptive.LdapEntry;
|
||||
import org.ldaptive.LdapException;
|
||||
import org.ldaptive.Response;
|
||||
import org.ldaptive.ReturnAttributes;
|
||||
import org.ldaptive.SearchOperation;
|
||||
import org.ldaptive.SearchRequest;
|
||||
import org.ldaptive.SearchResult;
|
||||
import org.ldaptive.SearchScope;
|
||||
import org.ldaptive.referral.SearchReferralHandler;
|
||||
|
||||
public class LdapHelper {
|
||||
|
||||
public static List<LdapEntry> search(final Connection conn, final String baseDn, final String filter,
|
||||
final SearchScope searchScope) throws LdapException {
|
||||
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
try {
|
||||
return AccessController.doPrivileged(new PrivilegedExceptionAction<List<LdapEntry>>() {
|
||||
@Override
|
||||
public List<LdapEntry> run() throws Exception {
|
||||
final List<LdapEntry> entries = new ArrayList<>();
|
||||
final SearchRequest request = new SearchRequest(baseDn, filter);
|
||||
request.setReferralHandler(new SearchReferralHandler());
|
||||
request.setSearchScope(searchScope);
|
||||
request.setDerefAliases(DerefAliases.ALWAYS);
|
||||
request.setReturnAttributes(ReturnAttributes.ALL.value());
|
||||
final SearchOperation search = new SearchOperation(conn);
|
||||
// referrals will be followed to build the response
|
||||
final Response<SearchResult> r = search.execute(request);
|
||||
final org.ldaptive.SearchResult result = r.getResult();
|
||||
entries.addAll(result.getEntries());
|
||||
return entries;
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
if (e.getException() instanceof LdapException) {
|
||||
throw (LdapException) e.getException();
|
||||
} else if (e.getException() instanceof RuntimeException) {
|
||||
throw (RuntimeException) e.getException();
|
||||
} else {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static LdapEntry lookup(final Connection conn, final String dn) throws LdapException {
|
||||
|
||||
final List<LdapEntry> entries = search(conn, dn, "(objectClass=*)", SearchScope.OBJECT);
|
||||
|
||||
if (entries.size() == 1) {
|
||||
return entries.get(0);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
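LdapHelper.lookup resolves a single entry by reading the DN itself (a base-scope search with the (objectClass=*) filter) and returns null unless exactly one entry comes back. A small usage sketch, assuming an already-opened ldaptive Connection named connection and a surrounding method that declares throws LdapException:

    // Illustrative sketch only; the connection and the DN are assumed to exist.
    LdapEntry entry = LdapHelper.lookup(connection, "cn=jdoe,ou=people,dc=example,dc=com");
    if (entry != null) {
        // attribute access works the same way as in the authorization backends
        LdapAttribute memberOf = entry.getAttribute("memberOf");
    }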
139
src/main/java/com/amazon/dlic/auth/ldap/util/Utils.java
Normal file
@ -0,0 +1,139 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.ldap.util;
|
||||
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedActionException;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.Comparator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.StringTokenizer;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.ldaptive.Connection;
|
||||
|
||||
public final class Utils {
|
||||
|
||||
private static final String RFC2254_ESCAPE_CHARS = "\\*()\000";
|
||||
|
||||
private Utils() {
|
||||
|
||||
}
|
||||
|
||||
public static void init() {
|
||||
// intentionally empty; calling init() simply forces early class initialization
|
||||
}
|
||||
|
||||
public static void unbindAndCloseSilently(final Connection connection) {
|
||||
if (connection == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
try {
|
||||
AccessController.doPrivileged(new PrivilegedExceptionAction<Object>() {
|
||||
@Override
|
||||
public Object run() throws Exception {
|
||||
connection.close();
|
||||
return null;
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* RFC 2254 string escaping
|
||||
*/
|
||||
public static String escapeStringRfc2254(final String str) {
|
||||
|
||||
if (str == null || str.length() == 0) {
|
||||
return str;
|
||||
}
|
||||
|
||||
final StringTokenizer tok = new StringTokenizer(str, RFC2254_ESCAPE_CHARS, true);
|
||||
|
||||
if (tok.countTokens() == 0) {
|
||||
return str;
|
||||
}
|
||||
|
||||
final StringBuilder out = new StringBuilder();
|
||||
while (tok.hasMoreTokens()) {
|
||||
final String s = tok.nextToken();
|
||||
|
||||
if (s.equals("*")) {
|
||||
out.append("\\2a");
|
||||
} else if (s.equals("(")) {
|
||||
out.append("\\28");
|
||||
} else if (s.equals(")")) {
|
||||
out.append("\\29");
|
||||
} else if (s.equals("\\")) {
|
||||
out.append("\\5c");
|
||||
} else if (s.equals("\000")) {
|
||||
out.append("\\00");
|
||||
} else {
|
||||
out.append(s);
|
||||
}
|
||||
}
|
||||
return out.toString();
|
||||
}
|
||||
|
||||
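escapeStringRfc2254 replaces the LDAP filter metacharacters *, (, ), \ and NUL with their hexadecimal escape sequences so that user-supplied names cannot change the structure of a search filter. A quick sketch of its effect on a hypothetical input:

    // Illustrative sketch only.
    String raw = "jdoe*)(uid=*";
    String escaped = Utils.escapeStringRfc2254(raw);
    // escaped is "jdoe\2a\29\28uid=\2a"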
public static List<Map.Entry<String, Settings>> getOrderedBaseSettings(Settings settings) {
|
||||
return getOrderedBaseSettings(settings.getAsGroups(true));
|
||||
}
|
||||
|
||||
public static List<Map.Entry<String, Settings>> getOrderedBaseSettings(Map<String, Settings> settingsMap) {
|
||||
return getOrderedBaseSettings(settingsMap.entrySet());
|
||||
}
|
||||
|
||||
public static List<Map.Entry<String, Settings>> getOrderedBaseSettings(Set<Map.Entry<String, Settings>> set) {
|
||||
List<Map.Entry<String, Settings>> result = new ArrayList<>(set);
|
||||
|
||||
sortBaseSettings(result);
|
||||
|
||||
return Collections.unmodifiableList(result);
|
||||
}
|
||||
|
||||
private static void sortBaseSettings(List<Map.Entry<String, Settings>> list) {
|
||||
list.sort(new Comparator<Map.Entry<String, Settings>>() {
|
||||
|
||||
@Override
|
||||
public int compare(Map.Entry<String, Settings> o1, Map.Entry<String, Settings> o2) {
|
||||
int attributeOrder = Integer.compare(o1.getValue().getAsInt("order", Integer.MAX_VALUE),
|
||||
o2.getValue().getAsInt("order", Integer.MAX_VALUE));
|
||||
|
||||
if (attributeOrder != 0) {
|
||||
return attributeOrder;
|
||||
}
|
||||
|
||||
return o1.getKey().compareTo(o2.getKey());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
}
|
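getOrderedBaseSettings gives the configured user and role search bases a deterministic evaluation order: groups are sorted by their optional numeric "order" value, with the group key as a tie-breaker. A sketch of the effect with two hypothetical role bases:

    // Illustrative sketch only; the group names and order values are made up.
    Settings roleBases = Settings.builder()
            .put("roles.primary.base", "ou=groups,dc=example,dc=com")
            .put("roles.primary.order", 1)
            .put("roles.fallback.base", "ou=legacy-groups,dc=example,dc=com")
            .put("roles.fallback.order", 2)
            .build();

    // "primary" is evaluated before "fallback"
    List<Map.Entry<String, Settings>> ordered =
            Utils.getOrderedBaseSettings(roleBases.getGroups("roles", true));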
227
src/main/java/com/amazon/dlic/auth/ldap2/LDAPAuthenticationBackend.java
Executable file
@ -0,0 +1,227 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.ldap2;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Path;
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedActionException;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.ElasticsearchSecurityException;
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.ldaptive.BindRequest;
|
||||
import org.ldaptive.Connection;
|
||||
import org.ldaptive.ConnectionFactory;
|
||||
import org.ldaptive.Credential;
|
||||
import org.ldaptive.LdapEntry;
|
||||
import org.ldaptive.LdapException;
|
||||
import org.ldaptive.Response;
|
||||
import org.ldaptive.pool.ConnectionPool;
|
||||
|
||||
import com.amazon.dlic.auth.ldap.LdapUser;
|
||||
import com.amazon.dlic.auth.ldap.util.ConfigConstants;
|
||||
import com.amazon.dlic.auth.ldap.util.Utils;
|
||||
import com.amazon.dlic.util.SettingsBasedSSLConfigurator.SSLConfigException;
|
||||
import com.amazon.opendistroforelasticsearch.security.auth.AuthenticationBackend;
|
||||
import com.amazon.opendistroforelasticsearch.security.auth.Destroyable;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.AuthCredentials;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.User;
|
||||
|
||||
public class LDAPAuthenticationBackend implements AuthenticationBackend, Destroyable {
|
||||
|
||||
static {
|
||||
Utils.init();
|
||||
}
|
||||
|
||||
protected static final Logger log = LogManager.getLogger(LDAPAuthenticationBackend.class);
|
||||
|
||||
private final Settings settings;
|
||||
|
||||
private ConnectionPool connectionPool;
|
||||
private ConnectionFactory connectionFactory;
|
||||
private ConnectionFactory authConnectionFactory;
|
||||
private LDAPUserSearcher userSearcher;
|
||||
|
||||
public LDAPAuthenticationBackend(final Settings settings, final Path configPath) throws SSLConfigException {
|
||||
this.settings = settings;
|
||||
|
||||
LDAPConnectionFactoryFactory ldapConnectionFactoryFactory = new LDAPConnectionFactoryFactory(settings,
|
||||
configPath);
|
||||
|
||||
this.connectionPool = ldapConnectionFactoryFactory.createConnectionPool();
|
||||
this.connectionFactory = ldapConnectionFactoryFactory.createConnectionFactory(this.connectionPool);
|
||||
|
||||
if (this.connectionPool != null) {
|
||||
this.authConnectionFactory = ldapConnectionFactoryFactory.createBasicConnectionFactory();
|
||||
} else {
|
||||
this.authConnectionFactory = this.connectionFactory;
|
||||
}
|
||||
|
||||
this.userSearcher = new LDAPUserSearcher(settings);
|
||||
}
|
||||
|
||||
@Override
|
||||
public User authenticate(final AuthCredentials credentials) throws ElasticsearchSecurityException {
|
||||
|
||||
Connection ldapConnection = null;
|
||||
final String user = Utils.escapeStringRfc2254(credentials.getUsername());
|
||||
byte[] password = credentials.getPassword();
|
||||
|
||||
try {
|
||||
|
||||
ldapConnection = connectionFactory.getConnection();
|
||||
ldapConnection.open();
|
||||
|
||||
LdapEntry entry = userSearcher.exists(ldapConnection, user);
|
||||
|
||||
// fake a user that does not exist
|
||||
// makes it harder to guess whether a user exists by looking at the
|
||||
// authentication delay time
|
||||
if (entry == null && settings.getAsBoolean(ConfigConstants.LDAP_FAKE_LOGIN_ENABLED, false)) {
|
||||
String fakeLoginDn = settings.get(ConfigConstants.LDAP_FAKE_LOGIN_DN,
|
||||
"CN=faketomakebindfail,DC=" + UUID.randomUUID().toString());
|
||||
entry = new LdapEntry(fakeLoginDn);
|
||||
password = settings.get(ConfigConstants.LDAP_FAKE_LOGIN_PASSWORD, "fakeLoginPwd123")
|
||||
.getBytes(StandardCharsets.UTF_8);
|
||||
} else if (entry == null) {
|
||||
throw new ElasticsearchSecurityException("No user " + user + " found");
|
||||
}
|
||||
|
||||
final String dn = entry.getDn();
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("Try to authenticate dn {}", dn);
|
||||
}
|
||||
|
||||
if (this.connectionPool == null) {
|
||||
authenticateByLdapServer(ldapConnection, dn, password);
|
||||
} else {
|
||||
authenticateByLdapServerWithSeparateConnection(dn, password);
|
||||
}
|
||||
|
||||
final String usernameAttribute = settings.get(ConfigConstants.LDAP_AUTHC_USERNAME_ATTRIBUTE, null);
|
||||
String username = dn;
|
||||
|
||||
if (usernameAttribute != null && entry.getAttribute(usernameAttribute) != null) {
|
||||
username = entry.getAttribute(usernameAttribute).getStringValue();
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Authenticated username {}", username);
|
||||
}
|
||||
|
||||
final int customAttrMaxValueLen = settings.getAsInt(ConfigConstants.LDAP_CUSTOM_ATTR_MAXVAL_LEN, 36);
|
||||
final List<String> whitelistedAttributes = settings.getAsList(ConfigConstants.LDAP_CUSTOM_ATTR_WHITELIST,
|
||||
null);
|
||||
|
||||
// by default, all LDAP attributes that are not binary and whose values are
|
||||
// at most 36 characters long are included in the user object;
|
||||
// if the whitelist contains at least one value, then attributes are
|
||||
// additionally checked against the whitelist (which may contain wildcards and regexes)
|
||||
return new LdapUser(username, user, entry, credentials, customAttrMaxValueLen, whitelistedAttributes);
|
||||
|
||||
} catch (final Exception e) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Unable to authenticate user due to ", e);
|
||||
}
|
||||
throw new ElasticsearchSecurityException(e.toString(), e);
|
||||
} finally {
|
||||
Arrays.fill(password, (byte) '\0');
|
||||
password = null;
|
||||
Utils.unbindAndCloseSilently(ldapConnection);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getType() {
|
||||
return "ldap";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean exists(final User user) {
|
||||
Connection ldapConnection = null;
|
||||
String userName = user.getName();
|
||||
|
||||
if (user instanceof LdapUser) {
|
||||
userName = ((LdapUser) user).getUserEntry().getDn();
|
||||
}
|
||||
|
||||
try {
|
||||
ldapConnection = this.connectionFactory.getConnection();
|
||||
ldapConnection.open();
|
||||
return this.userSearcher.exists(ldapConnection, userName) != null;
|
||||
} catch (final Exception e) {
|
||||
log.warn("User {} does not exist due to " + e, userName);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("User does not exist due to ", e);
|
||||
}
|
||||
return false;
|
||||
} finally {
|
||||
Utils.unbindAndCloseSilently(ldapConnection);
|
||||
}
|
||||
}
|
||||
|
||||
private void authenticateByLdapServer(final Connection connection, final String dn, byte[] password)
|
||||
throws LdapException {
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
try {
|
||||
AccessController.doPrivileged(new PrivilegedExceptionAction<Response<Void>>() {
|
||||
@Override
|
||||
public Response<Void> run() throws LdapException {
|
||||
return connection.getProviderConnection().bind(new BindRequest(dn, new Credential(password)));
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
if (e.getException() instanceof LdapException) {
|
||||
throw (LdapException) e.getException();
|
||||
} else if (e.getException() instanceof RuntimeException) {
|
||||
throw (RuntimeException) e.getException();
|
||||
} else {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void authenticateByLdapServerWithSeparateConnection(final String dn, byte[] password) throws LdapException {
|
||||
try (Connection unpooledConnection = this.authConnectionFactory.getConnection()) {
|
||||
unpooledConnection.open();
|
||||
authenticateByLdapServer(unpooledConnection, dn, password);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void destroy() {
|
||||
if (this.connectionPool != null) {
|
||||
this.connectionPool.close();
|
||||
this.connectionPool = null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
539
src/main/java/com/amazon/dlic/auth/ldap2/LDAPAuthorizationBackend.java
Executable file
@ -0,0 +1,539 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.ldap2;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import javax.naming.InvalidNameException;
|
||||
import javax.naming.ldap.LdapName;
|
||||
import javax.naming.ldap.Rdn;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.ElasticsearchSecurityException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.ldaptive.Connection;
|
||||
import org.ldaptive.ConnectionFactory;
|
||||
import org.ldaptive.LdapAttribute;
|
||||
import org.ldaptive.LdapEntry;
|
||||
import org.ldaptive.LdapException;
|
||||
import org.ldaptive.SearchScope;
|
||||
import org.ldaptive.pool.ConnectionPool;
|
||||
|
||||
import com.amazon.dlic.auth.ldap.LdapUser;
|
||||
import com.amazon.dlic.auth.ldap.util.ConfigConstants;
|
||||
import com.amazon.dlic.auth.ldap.util.LdapHelper;
|
||||
import com.amazon.dlic.auth.ldap.util.Utils;
|
||||
import com.amazon.dlic.util.SettingsBasedSSLConfigurator.SSLConfigException;
|
||||
import com.amazon.opendistroforelasticsearch.security.auth.AuthorizationBackend;
|
||||
import com.amazon.opendistroforelasticsearch.security.auth.Destroyable;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.WildcardMatcher;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.AuthCredentials;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.User;
|
||||
import com.google.common.collect.HashMultimap;
|
||||
|
||||
public class LDAPAuthorizationBackend implements AuthorizationBackend, Destroyable {
|
||||
|
||||
static final String ZERO_PLACEHOLDER = "{0}";
|
||||
static final String ONE_PLACEHOLDER = "{1}";
|
||||
static final String TWO_PLACEHOLDER = "{2}";
|
||||
static final String DEFAULT_ROLEBASE = "";
|
||||
static final String DEFAULT_ROLESEARCH = "(member={0})";
|
||||
static final String DEFAULT_ROLENAME = "name";
|
||||
static final String DEFAULT_USERROLENAME = "memberOf";
|
||||
|
||||
static {
|
||||
Utils.init();
|
||||
}
|
||||
|
||||
protected static final Logger log = LogManager.getLogger(LDAPAuthorizationBackend.class);
|
||||
private final Settings settings;
|
||||
private final List<Map.Entry<String, Settings>> roleBaseSettings;
|
||||
private ConnectionPool connectionPool;
|
||||
private ConnectionFactory connectionFactory;
|
||||
private LDAPUserSearcher userSearcher;
|
||||
|
||||
public LDAPAuthorizationBackend(final Settings settings, final Path configPath) throws SSLConfigException {
|
||||
this.settings = settings;
|
||||
this.roleBaseSettings = getRoleSearchSettings(settings);
|
||||
|
||||
LDAPConnectionFactoryFactory ldapConnectionFactoryFactory = new LDAPConnectionFactoryFactory(settings,
|
||||
configPath);
|
||||
|
||||
this.connectionPool = ldapConnectionFactoryFactory.createConnectionPool();
|
||||
this.connectionFactory = ldapConnectionFactoryFactory.createConnectionFactory(this.connectionPool);
|
||||
this.userSearcher = new LDAPUserSearcher(settings);
|
||||
}
|
||||
|
||||
private static List<Map.Entry<String, Settings>> getRoleSearchSettings(Settings settings) {
|
||||
Map<String, Settings> groupedSettings = settings.getGroups(ConfigConstants.LDAP_AUTHZ_ROLES, true);
|
||||
|
||||
if (!groupedSettings.isEmpty()) {
|
||||
// New style settings
|
||||
return Utils.getOrderedBaseSettings(groupedSettings);
|
||||
} else {
|
||||
// Old style settings
|
||||
return convertOldStyleSettingsToNewStyle(settings);
|
||||
}
|
||||
}
|
||||
|
||||
private static List<Map.Entry<String, Settings>> convertOldStyleSettingsToNewStyle(Settings settings) {
|
||||
Map<String, Settings> result = new HashMap<>(1);
|
||||
|
||||
Settings.Builder settingsBuilder = Settings.builder();
|
||||
|
||||
settingsBuilder.put(ConfigConstants.LDAP_AUTHCZ_BASE,
|
||||
settings.get(ConfigConstants.LDAP_AUTHZ_ROLEBASE, DEFAULT_ROLEBASE));
|
||||
settingsBuilder.put(ConfigConstants.LDAP_AUTHCZ_SEARCH,
|
||||
settings.get(ConfigConstants.LDAP_AUTHZ_ROLESEARCH, DEFAULT_ROLESEARCH));
|
||||
|
||||
result.put("convertedOldStyleSettings", settingsBuilder.build());
|
||||
|
||||
return Collections.singletonList(result.entrySet().iterator().next());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void fillRoles(final User user, final AuthCredentials optionalAuthCreds)
|
||||
throws ElasticsearchSecurityException {
|
||||
|
||||
if (user == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
String authenticatedUser;
|
||||
String originalUserName;
|
||||
LdapEntry entry = null;
|
||||
String dn = null;
|
||||
|
||||
if (user instanceof LdapUser) {
|
||||
entry = ((LdapUser) user).getUserEntry();
|
||||
dn = entry.getDn();
|
||||
authenticatedUser = entry.getDn();
|
||||
originalUserName = ((LdapUser) user).getOriginalUsername();
|
||||
} else {
|
||||
authenticatedUser = Utils.escapeStringRfc2254(user.getName());
|
||||
originalUserName = user.getName();
|
||||
}
|
||||
|
||||
final boolean rolesearchEnabled = settings.getAsBoolean(ConfigConstants.LDAP_AUTHZ_ROLESEARCH_ENABLED, true);
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Try to get roles for {}", authenticatedUser);
|
||||
}
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("user class: {}", user.getClass());
|
||||
log.trace("authenticatedUser: {}", authenticatedUser);
|
||||
log.trace("originalUserName: {}", originalUserName);
|
||||
log.trace("entry: {}", String.valueOf(entry));
|
||||
log.trace("dn: {}", dn);
|
||||
}
|
||||
|
||||
final List<String> skipUsers = settings.getAsList(ConfigConstants.LDAP_AUTHZ_SKIP_USERS,
|
||||
Collections.emptyList());
|
||||
if (!skipUsers.isEmpty() && WildcardMatcher.matchAny(skipUsers, authenticatedUser)) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Skipped search roles of user {}", authenticatedUser);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
try (Connection connection = this.connectionFactory.getConnection()) {
|
||||
|
||||
connection.open();
|
||||
|
||||
if (entry == null || dn == null) {
|
||||
|
||||
if (isValidDn(authenticatedUser)) {
|
||||
// assume dn
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("{} is a valid DN", authenticatedUser);
|
||||
}
|
||||
|
||||
entry = LdapHelper.lookup(connection, authenticatedUser);
|
||||
|
||||
if (entry == null) {
|
||||
throw new ElasticsearchSecurityException("No user '" + authenticatedUser + "' found");
|
||||
}
|
||||
|
||||
} else {
|
||||
entry = this.userSearcher.exists(connection, user.getName());
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("{} is not a valid DN and was resolved to {}", authenticatedUser, entry);
|
||||
}
|
||||
|
||||
if (entry == null || entry.getDn() == null) {
|
||||
throw new ElasticsearchSecurityException("No user " + authenticatedUser + " found");
|
||||
}
|
||||
}
|
||||
|
||||
dn = entry.getDn();
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("User found with DN {}", dn);
|
||||
}
|
||||
}
|
||||
|
||||
final Set<LdapName> ldapRoles = new HashSet<>(150);
|
||||
final Set<String> nonLdapRoles = new HashSet<>(150);
|
||||
final HashMultimap<LdapName, Map.Entry<String, Settings>> resultRoleSearchBaseKeys = HashMultimap.create();
|
||||
|
||||
// Roles as an attribute of the user entry
|
||||
// default is userrolename: memberOf
|
||||
final String userRoleNames = settings.get(ConfigConstants.LDAP_AUTHZ_USERROLENAME, DEFAULT_USERROLENAME);
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("raw userRoleName(s): {}", userRoleNames);
|
||||
}
|
||||
|
||||
// more than one role name is supported; the names must be separated by commas
|
||||
for (String userRoleName : userRoleNames.split(",")) {
|
||||
final String roleName = userRoleName.trim();
|
||||
if (entry.getAttribute(roleName) != null) {
|
||||
final Collection<String> userRoles = entry.getAttribute(roleName).getStringValues();
|
||||
for (final String possibleRoleDN : userRoles) {
|
||||
if (isValidDn(possibleRoleDN)) {
|
||||
LdapName ldapName = new LdapName(possibleRoleDN);
|
||||
ldapRoles.add(ldapName);
|
||||
resultRoleSearchBaseKeys.putAll(ldapName, this.roleBaseSettings);
|
||||
} else {
|
||||
nonLdapRoles.add(possibleRoleDN);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("User attr. ldap roles count: {}", ldapRoles.size());
|
||||
log.trace("User attr. ldap roles {}", ldapRoles);
|
||||
log.trace("User attr. non-ldap roles count: {}", nonLdapRoles.size());
|
||||
log.trace("User attr. non-ldap roles {}", nonLdapRoles);
|
||||
|
||||
}
|
||||
|
||||
// The attribute in a role entry containing the name of that role. The default is
|
||||
// "name".
|
||||
// Can also be "dn" to use the full DN as rolename.
|
||||
// rolename: name
|
||||
final String roleName = settings.get(ConfigConstants.LDAP_AUTHZ_ROLENAME, DEFAULT_ROLENAME);
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("roleName: {}", roleName);
|
||||
}
|
||||
|
||||
// Specifies the name of the attribute whose value is substituted for {2};
|
||||
// the value is taken from the directory entry of the
|
||||
// authenticated user
|
||||
// userroleattribute: null
|
||||
final String userRoleAttributeName = settings.get(ConfigConstants.LDAP_AUTHZ_USERROLEATTRIBUTE, null);
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("userRoleAttribute: {}", userRoleAttributeName);
|
||||
log.trace("rolesearch: {}", settings.get(ConfigConstants.LDAP_AUTHZ_ROLESEARCH, DEFAULT_ROLESEARCH));
|
||||
}
|
||||
|
||||
String userRoleAttributeValue = null;
|
||||
final LdapAttribute userRoleAttribute = entry.getAttribute(userRoleAttributeName);
|
||||
|
||||
if (userRoleAttribute != null) {
|
||||
userRoleAttributeValue = userRoleAttribute.getStringValue();
|
||||
}
|
||||
|
||||
if (rolesearchEnabled) {
|
||||
String escapedDn = Utils.escapeStringRfc2254(dn);
|
||||
|
||||
for (Map.Entry<String, Settings> roleSearchSettingsEntry : roleBaseSettings) {
|
||||
Settings roleSearchSettings = roleSearchSettingsEntry.getValue();
|
||||
|
||||
List<LdapEntry> rolesResult = LdapHelper.search(connection,
|
||||
roleSearchSettings.get(ConfigConstants.LDAP_AUTHCZ_BASE, DEFAULT_ROLEBASE),
|
||||
roleSearchSettings.get(ConfigConstants.LDAP_AUTHCZ_SEARCH, DEFAULT_ROLESEARCH)
|
||||
.replace(ZERO_PLACEHOLDER, escapedDn).replace(ONE_PLACEHOLDER, originalUserName)
|
||||
.replace(TWO_PLACEHOLDER,
|
||||
userRoleAttributeValue == null ? TWO_PLACEHOLDER : userRoleAttributeValue),
|
||||
SearchScope.SUBTREE);
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("Results for LDAP group search for " + escapedDn + " in base "
|
||||
+ roleSearchSettingsEntry.getKey() + ":\n" + rolesResult);
|
||||
}
|
||||
|
||||
if (rolesResult != null && !rolesResult.isEmpty()) {
|
||||
for (final Iterator<LdapEntry> iterator = rolesResult.iterator(); iterator.hasNext();) {
|
||||
LdapEntry searchResultEntry = iterator.next();
|
||||
LdapName ldapName = new LdapName(searchResultEntry.getDn());
|
||||
ldapRoles.add(ldapName);
|
||||
resultRoleSearchBaseKeys.put(ldapName, roleSearchSettingsEntry);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("roles count total {}", ldapRoles.size());
|
||||
}
|
||||
|
||||
// nested roles; this only makes sense for DN-style role names
|
||||
if (settings.getAsBoolean(ConfigConstants.LDAP_AUTHZ_RESOLVE_NESTED_ROLES, false)) {
|
||||
|
||||
final List<String> nestedRoleFilter = settings.getAsList(ConfigConstants.LDAP_AUTHZ_NESTEDROLEFILTER,
|
||||
Collections.emptyList());
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("Evaluate nested roles");
|
||||
}
|
||||
|
||||
final Set<LdapName> nestedReturn = new HashSet<>(ldapRoles);
|
||||
|
||||
for (final LdapName roleLdapName : ldapRoles) {
|
||||
Set<Map.Entry<String, Settings>> nameRoleSearchBaseKeys = resultRoleSearchBaseKeys
|
||||
.get(roleLdapName);
|
||||
|
||||
if (nameRoleSearchBaseKeys == null) {
|
||||
log.error("Could not find roleSearchBaseKeys for " + roleLdapName + "; existing: "
|
||||
+ resultRoleSearchBaseKeys);
|
||||
continue;
|
||||
}
|
||||
|
||||
final Set<LdapName> nestedRoles = resolveNestedRoles(roleLdapName, connection, userRoleNames, 0,
|
||||
rolesearchEnabled, nameRoleSearchBaseKeys, nestedRoleFilter);
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("{} nested roles for {}", nestedRoles.size(), roleLdapName);
|
||||
}
|
||||
|
||||
nestedReturn.addAll(nestedRoles);
|
||||
}
|
||||
|
||||
for (final LdapName roleLdapName : nestedReturn) {
|
||||
final String role = getRoleFromAttribute(roleLdapName, roleName);
|
||||
|
||||
if (!Strings.isNullOrEmpty(role)) {
|
||||
user.addRole(role);
|
||||
} else {
|
||||
log.warn("No or empty attribute '{}' for entry {}", roleName, roleLdapName);
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
// DN roles, extract rolename according to config
|
||||
for (final LdapName roleLdapName : ldapRoles) {
|
||||
final String role = getRoleFromAttribute(roleLdapName, roleName);
|
||||
|
||||
if (!Strings.isNullOrEmpty(role)) {
|
||||
user.addRole(role);
|
||||
} else {
|
||||
log.warn("No or empty attribute '{}' for entry {}", roleName, roleLdapName);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// add all non-LDAP roles from user attributes to the final set of backend roles
|
||||
for (String nonLdapRoleName : nonLdapRoles) {
|
||||
user.addRole(nonLdapRoleName);
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Roles for {} -> {}", user.getName(), user.getRoles());
|
||||
}
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("returned user: {}", user);
|
||||
}
|
||||
|
||||
} catch (final Exception e) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Unable to fill user roles due to ", e);
|
||||
}
|
||||
throw new ElasticsearchSecurityException(e.toString(), e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
protected Set<LdapName> resolveNestedRoles(final LdapName roleDn, final Connection ldapConnection,
|
||||
String userRoleName, int depth, final boolean rolesearchEnabled,
|
||||
Set<Map.Entry<String, Settings>> roleSearchBaseSettingsSet, final List<String> roleFilter)
|
||||
throws ElasticsearchSecurityException, LdapException {
|
||||
|
||||
if (!roleFilter.isEmpty() && WildcardMatcher.matchAny(roleFilter, roleDn.toString())) {
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("Filter nested role {}", roleDn);
|
||||
}
|
||||
|
||||
return Collections.emptySet();
|
||||
}
|
||||
|
||||
depth++;
|
||||
|
||||
final Set<LdapName> result = new HashSet<>(20);
|
||||
final HashMultimap<LdapName, Map.Entry<String, Settings>> resultRoleSearchBaseKeys = HashMultimap.create();
|
||||
|
||||
final LdapEntry e0 = LdapHelper.lookup(ldapConnection, roleDn.toString());
|
||||
|
||||
if (e0.getAttribute(userRoleName) != null) {
|
||||
final Collection<String> userRoles = e0.getAttribute(userRoleName).getStringValues();
|
||||
|
||||
for (final String possibleRoleDN : userRoles) {
|
||||
if (isValidDn(possibleRoleDN)) {
|
||||
try {
|
||||
LdapName ldapName = new LdapName(possibleRoleDN);
|
||||
result.add(ldapName);
|
||||
resultRoleSearchBaseKeys.putAll(ldapName, this.roleBaseSettings);
|
||||
} catch (InvalidNameException e) {
|
||||
// ignore
|
||||
}
|
||||
} else {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Cannot add {} as a role because its not a valid dn", possibleRoleDN);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("result nested attr count for depth {} : {}", depth, result.size());
|
||||
}
|
||||
|
||||
if (rolesearchEnabled) {
|
||||
String escapedDn = Utils.escapeStringRfc2254(roleDn.toString());
|
||||
|
||||
for (Map.Entry<String, Settings> roleSearchBaseSettingsEntry : Utils
|
||||
.getOrderedBaseSettings(roleSearchBaseSettingsSet)) {
|
||||
Settings roleSearchSettings = roleSearchBaseSettingsEntry.getValue();
|
||||
|
||||
List<LdapEntry> foundEntries = LdapHelper.search(ldapConnection,
|
||||
roleSearchSettings.get(ConfigConstants.LDAP_AUTHCZ_BASE, DEFAULT_ROLEBASE),
|
||||
roleSearchSettings.get(ConfigConstants.LDAP_AUTHCZ_SEARCH, DEFAULT_ROLESEARCH)
|
||||
.replace(ZERO_PLACEHOLDER, escapedDn).replace(ONE_PLACEHOLDER, escapedDn),
|
||||
SearchScope.SUBTREE);
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("Results for LDAP group search for " + escapedDn + " in base "
|
||||
+ roleSearchBaseSettingsEntry.getKey() + ":\n" + foundEntries);
|
||||
}
|
||||
|
||||
if (foundEntries != null) {
|
||||
for (final LdapEntry entry : foundEntries) {
|
||||
try {
|
||||
final LdapName dn = new LdapName(entry.getDn());
|
||||
result.add(dn);
|
||||
resultRoleSearchBaseKeys.put(dn, roleSearchBaseSettingsEntry);
|
||||
} catch (final InvalidNameException e) {
|
||||
throw new LdapException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
int maxDepth = ConfigConstants.LDAP_AUTHZ_MAX_NESTED_DEPTH_DEFAULT;
|
||||
try {
|
||||
maxDepth = settings.getAsInt(ConfigConstants.LDAP_AUTHZ_MAX_NESTED_DEPTH,
|
||||
ConfigConstants.LDAP_AUTHZ_MAX_NESTED_DEPTH_DEFAULT);
|
||||
} catch (Exception e) {
|
||||
log.error(ConfigConstants.LDAP_AUTHZ_MAX_NESTED_DEPTH + " is not parseable: " + e, e);
|
||||
}
|
||||
|
||||
if (depth < maxDepth) {
|
||||
for (final LdapName nm : new HashSet<LdapName>(result)) {
|
||||
Set<Map.Entry<String, Settings>> nameRoleSearchBaseKeys = resultRoleSearchBaseKeys.get(nm);
|
||||
|
||||
if (nameRoleSearchBaseKeys == null) {
|
||||
log.error(
|
||||
"Could not find roleSearchBaseKeys for " + nm + "; existing: " + resultRoleSearchBaseKeys);
|
||||
continue;
|
||||
}
|
||||
|
||||
final Set<LdapName> in = resolveNestedRoles(nm, ldapConnection, userRoleName, depth, rolesearchEnabled,
|
||||
nameRoleSearchBaseKeys, roleFilter);
|
||||
result.addAll(in);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getType() {
|
||||
return "ldap";
|
||||
}
|
||||
|
||||
private boolean isValidDn(final String dn) {
|
||||
|
||||
if (Strings.isNullOrEmpty(dn)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
new LdapName(dn);
|
||||
} catch (final Exception e) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private String getRoleFromAttribute(final LdapName ldapName, final String role) {
|
||||
|
||||
if (ldapName == null || Strings.isNullOrEmpty(role)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if ("dn".equalsIgnoreCase(role)) {
|
||||
return ldapName.toString();
|
||||
}
|
||||
|
||||
List<Rdn> rdns = new ArrayList<>(ldapName.getRdns().size());
|
||||
rdns.addAll(ldapName.getRdns());
|
||||
|
||||
Collections.reverse(rdns);
|
||||
|
||||
for (Rdn rdn : rdns) {
|
||||
if (role.equalsIgnoreCase(rdn.getType())) {
|
||||
|
||||
if (rdn.getValue() == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return String.valueOf(rdn.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void destroy() {
|
||||
if (this.connectionPool != null) {
|
||||
this.connectionPool.close();
|
||||
this.connectionPool = null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
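resolveNestedRoles walks group-of-groups memberships recursively, up to max_nested_depth levels (30 by default), and skips any role whose DN matches one of the configured nested_role_filter patterns. A sketch of how such a pattern is matched, with made-up values:

    // Illustrative sketch only; the patterns and the DN are hypothetical.
    List<String> nestedRoleFilter = Arrays.asList("cn=all-users,*", "*,ou=system,dc=example,dc=com");
    String candidateRoleDn = "cn=all-users,ou=groups,dc=example,dc=com";

    // roles matching the filter are returned without resolving their own memberships
    boolean skip = WildcardMatcher.matchAny(nestedRoleFilter, candidateRoleDn);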
@ -0,0 +1,333 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.ldap2;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.time.Duration;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.ldaptive.ActivePassiveConnectionStrategy;
|
||||
import org.ldaptive.BindConnectionInitializer;
|
||||
import org.ldaptive.CompareRequest;
|
||||
import org.ldaptive.Connection;
|
||||
import org.ldaptive.ConnectionConfig;
|
||||
import org.ldaptive.ConnectionFactory;
|
||||
import org.ldaptive.ConnectionInitializer;
|
||||
import org.ldaptive.ConnectionStrategy;
|
||||
import org.ldaptive.Credential;
|
||||
import org.ldaptive.DefaultConnectionFactory;
|
||||
import org.ldaptive.LdapAttribute;
|
||||
import org.ldaptive.RandomConnectionStrategy;
|
||||
import org.ldaptive.ReturnAttributes;
|
||||
import org.ldaptive.RoundRobinConnectionStrategy;
|
||||
import org.ldaptive.SearchFilter;
|
||||
import org.ldaptive.SearchRequest;
|
||||
import org.ldaptive.SearchScope;
|
||||
import org.ldaptive.pool.AbstractConnectionPool;
|
||||
import org.ldaptive.pool.BlockingConnectionPool;
|
||||
import org.ldaptive.pool.CompareValidator;
|
||||
import org.ldaptive.pool.ConnectionPool;
|
||||
import org.ldaptive.pool.IdlePruneStrategy;
|
||||
import org.ldaptive.pool.PoolConfig;
|
||||
import org.ldaptive.pool.PooledConnectionFactory;
|
||||
import org.ldaptive.pool.SearchValidator;
|
||||
import org.ldaptive.pool.SoftLimitConnectionPool;
|
||||
import org.ldaptive.pool.Validator;
|
||||
import org.ldaptive.provider.Provider;
|
||||
import org.ldaptive.provider.jndi.JndiProviderConfig;
|
||||
import org.ldaptive.sasl.ExternalConfig;
|
||||
import org.ldaptive.ssl.AllowAnyHostnameVerifier;
|
||||
import org.ldaptive.ssl.AllowAnyTrustManager;
|
||||
import org.ldaptive.ssl.CredentialConfig;
|
||||
import org.ldaptive.ssl.CredentialConfigFactory;
|
||||
import org.ldaptive.ssl.SslConfig;
|
||||
|
||||
import com.amazon.dlic.auth.ldap.util.ConfigConstants;
|
||||
import com.amazon.dlic.util.SettingsBasedSSLConfigurator;
|
||||
import com.amazon.dlic.util.SettingsBasedSSLConfigurator.SSLConfigException;
|
||||
|
||||
public class LDAPConnectionFactoryFactory {
|
||||
|
||||
private static final Logger log = LogManager.getLogger(LDAPConnectionFactoryFactory.class);
|
||||
|
||||
private final Settings settings;
|
||||
private final SettingsBasedSSLConfigurator.SSLConfig sslConfig;
|
||||
|
||||
public LDAPConnectionFactoryFactory(Settings settings, Path configPath) throws SSLConfigException {
|
||||
this.settings = settings;
|
||||
this.sslConfig = new SettingsBasedSSLConfigurator(settings, configPath, "").buildSSLConfig();
|
||||
}
|
||||
|
||||
public ConnectionFactory createConnectionFactory(ConnectionPool connectionPool) {
|
||||
if (connectionPool != null) {
|
||||
return new PooledConnectionFactory(connectionPool);
|
||||
} else {
|
||||
return createBasicConnectionFactory();
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
public DefaultConnectionFactory createBasicConnectionFactory() {
|
||||
DefaultConnectionFactory result = new DefaultConnectionFactory(getConnectionConfig());
|
||||
|
||||
result.setProvider(new PrivilegedProvider((Provider<JndiProviderConfig>) result.getProvider()));
|
||||
|
||||
JndiProviderConfig jndiProviderConfig = (JndiProviderConfig) result.getProvider().getProviderConfig();
|
||||
|
||||
jndiProviderConfig.setClassLoader(MakeJava9Happy.getClassLoader());
|
||||
|
||||
if (this.sslConfig != null) {
|
||||
configureSSLinConnectionFactory(result);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public ConnectionPool createConnectionPool() {
|
||||
|
||||
if (!this.settings.getAsBoolean(ConfigConstants.LDAP_POOL_ENABLED, false)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
PoolConfig poolConfig = new PoolConfig();
|
||||
|
||||
poolConfig.setMinPoolSize(this.settings.getAsInt(ConfigConstants.LDAP_POOL_MIN_SIZE, 3));
|
||||
poolConfig.setMaxPoolSize(this.settings.getAsInt(ConfigConstants.LDAP_POOL_MAX_SIZE, 10));
|
||||
|
||||
if (this.settings.getAsBoolean("validation.enabled", false)) {
|
||||
poolConfig.setValidateOnCheckIn(this.settings.getAsBoolean("validation.on_checkin", false));
|
||||
poolConfig.setValidateOnCheckOut(this.settings.getAsBoolean("validation.on_checkout", false));
|
||||
poolConfig.setValidatePeriodically(this.settings.getAsBoolean("validation.periodically", true));
|
||||
poolConfig.setValidatePeriod(Duration.ofMinutes(this.settings.getAsLong("validation.period", 30L)));
|
||||
poolConfig.setValidateTimeout(Duration.ofSeconds(this.settings.getAsLong("validation.timeout", 5L)));
|
||||
}
|
||||
|
||||
AbstractConnectionPool result;
|
||||
|
||||
if ("blocking".equals(this.settings.get(ConfigConstants.LDAP_POOL_TYPE))) {
|
||||
result = new BlockingConnectionPool(poolConfig, createBasicConnectionFactory());
|
||||
} else {
|
||||
result = new SoftLimitConnectionPool(poolConfig, createBasicConnectionFactory());
|
||||
}
|
||||
|
||||
result.setValidator(getConnectionValidator());
|
||||
result.setPruneStrategy(new IdlePruneStrategy(Duration.ofMinutes(this.settings.getAsLong("pruning.period", 5L)),
|
||||
Duration.ofMinutes(this.settings.getAsLong("pruning.idleTime", 10L))));
|
||||
|
||||
result.initialize();
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private ConnectionConfig getConnectionConfig() {
|
||||
ConnectionConfig result = new ConnectionConfig(getLdapUrlString());
|
||||
|
||||
if (this.sslConfig != null) {
|
||||
configureSSL(result);
|
||||
}
|
||||
|
||||
result.setConnectionStrategy(getConnectionStrategy());
|
||||
result.setConnectionInitializer(getConnectionInitializer());
|
||||
|
||||
long connectTimeout = settings.getAsLong(ConfigConstants.LDAP_CONNECT_TIMEOUT, 5000L); // 0L means TCP
|
||||
// default timeout
|
||||
long responseTimeout = settings.getAsLong(ConfigConstants.LDAP_RESPONSE_TIMEOUT, 0L); // 0L means wait
|
||||
// infinitely
|
||||
|
||||
result.setConnectTimeout(Duration.ofMillis(connectTimeout < 0L ? 0L : connectTimeout)); // 5 sec by default
|
||||
result.setResponseTimeout(Duration.ofMillis(responseTimeout < 0L ? 0L : responseTimeout));
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("LDAP connection config:\n" + result);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private ConnectionInitializer getConnectionInitializer() {
|
||||
BindConnectionInitializer result = new BindConnectionInitializer();
|
||||
|
||||
String bindDn = settings.get(ConfigConstants.LDAP_BIND_DN, null);
|
||||
String password = settings.get(ConfigConstants.LDAP_PASSWORD, null);
|
||||
|
||||
if (password != null && password.length() == 0) {
|
||||
password = null;
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("bindDn {}, password {}", bindDn, password != null ? "****" : "<not set>");
|
||||
}
|
||||
|
||||
if (bindDn != null && password == null) {
|
||||
log.error("No password given for bind_dn {}. Will try to authenticate anonymously to ldap", bindDn);
|
||||
}
|
||||
|
||||
boolean enableClientAuth = settings.getAsBoolean(ConfigConstants.LDAPS_ENABLE_SSL_CLIENT_AUTH,
|
||||
ConfigConstants.LDAPS_ENABLE_SSL_CLIENT_AUTH_DEFAULT);
|
||||
|
||||
if (bindDn != null && password != null) {
|
||||
log.debug("Will perform simple bind with bind dn");
|
||||
result.setBindDn(bindDn);
|
||||
result.setBindCredential(new Credential(password));
|
||||
|
||||
if (enableClientAuth) {
|
||||
log.warn(
|
||||
"Will perform simple bind with bind dn because to bind dn is given and overrides client cert authentication");
|
||||
}
|
||||
} else if (enableClientAuth) {
|
||||
log.debug("Will perform External SASL bind because client cert authentication is enabled");
|
||||
result.setBindSaslConfig(new ExternalConfig());
|
||||
} else {
|
||||
log.debug("Will perform anonymous bind because no bind dn or password is given");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private ConnectionStrategy getConnectionStrategy() {
|
||||
switch (this.settings.get(ConfigConstants.LDAP_CONNECTION_STRATEGY, "active_passive").toLowerCase()) {
|
||||
case "round_robin":
|
||||
return new RoundRobinConnectionStrategy();
|
||||
case "random":
|
||||
return new RandomConnectionStrategy();
|
||||
default:
|
||||
return new ActivePassiveConnectionStrategy();
|
||||
}
|
||||
}
|
||||
|
||||
private Validator<Connection> getConnectionValidator() {
|
||||
if (!this.settings.getAsBoolean("validation.enabled", false)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
String validationStrategy = this.settings.get("validation.strategy", "search");
|
||||
Validator<Connection> result = null;
|
||||
|
||||
if ("compare".equalsIgnoreCase(validationStrategy)) {
|
||||
result = new CompareValidator(new CompareRequest(this.settings.get("validation.compare.dn", ""),
|
||||
new LdapAttribute(this.settings.get("validation.compare.attribute", "objectClass"),
|
||||
this.settings.get("validation.compare.value", "top"))));
|
||||
} else {
|
||||
SearchRequest searchRequest = new SearchRequest();
|
||||
searchRequest.setBaseDn(this.settings.get("validation.search.base_dn", ""));
|
||||
searchRequest.setSearchFilter(
|
||||
new SearchFilter(this.settings.get("validation.search.filter", "(objectClass=*)")));
|
||||
searchRequest.setReturnAttributes(ReturnAttributes.NONE.value());
|
||||
searchRequest.setSearchScope(SearchScope.OBJECT);
|
||||
searchRequest.setSizeLimit(1);
|
||||
|
||||
result = new SearchValidator(searchRequest);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private String getLdapUrlString() {
|
||||
// It's a bit odd that we build a plain string from structured data which is
|
||||
// later parsed again by ldaptive. But that's the way the API wants it to be.
|
||||
|
||||
List<String> ldapHosts = this.settings.getAsList(ConfigConstants.LDAP_HOSTS,
|
||||
Collections.singletonList("localhost"));
|
||||
boolean enableSSL = settings.getAsBoolean(ConfigConstants.LDAPS_ENABLE_SSL, false);
|
||||
|
||||
StringBuilder result = new StringBuilder();
|
||||
|
||||
for (String ldapHost : ldapHosts) {
|
||||
if (result.length() > 0) {
|
||||
result.append(" ");
|
||||
}
|
||||
|
||||
if (ldapHost.contains("://")) {
|
||||
result.append(ldapHost);
|
||||
} else if (enableSSL) {
|
||||
result.append("ldaps://").append(ldapHost);
|
||||
} else {
|
||||
result.append("ldap://").append(ldapHost);
|
||||
}
|
||||
}
|
||||
|
||||
return result.toString();
|
||||
}
|
||||
|
||||
private void configureSSL(ConnectionConfig config) {
|
||||
|
||||
if (this.sslConfig == null) {
|
||||
// Disabled
|
||||
return;
|
||||
}
|
||||
|
||||
SslConfig ldaptiveSslConfig = new SslConfig();
|
||||
CredentialConfig cc = CredentialConfigFactory.createKeyStoreCredentialConfig(
|
||||
this.sslConfig.getEffectiveTruststore(), this.sslConfig.getEffectiveTruststoreAliasesArray(),
|
||||
this.sslConfig.getEffectiveKeystore(), this.sslConfig.getEffectiveKeyPasswordString(),
|
||||
this.sslConfig.getEffectiveKeyAliasesArray());
|
||||
|
||||
ldaptiveSslConfig.setCredentialConfig(cc);
|
||||
|
||||
if (!this.sslConfig.isHostnameVerificationEnabled()) {
|
||||
ldaptiveSslConfig.setHostnameVerifier(new AllowAnyHostnameVerifier());
|
||||
|
||||
if (!Boolean.parseBoolean(System.getProperty("com.sun.jndi.ldap.object.disableEndpointIdentification"))) {
|
||||
log.warn("In order to disable host name verification for LDAP connections (verify_hostnames: true), "
|
||||
+ "you also need to set set the system property com.sun.jndi.ldap.object.disableEndpointIdentification to true when starting the JVM running ES. "
|
||||
+ "This applies for all Java versions released since July 2018.");
|
||||
// See:
|
||||
// https://www.oracle.com/technetwork/java/javase/8u181-relnotes-4479407.html
|
||||
// https://www.oracle.com/technetwork/java/javase/10-0-2-relnotes-4477557.html
|
||||
// https://www.oracle.com/technetwork/java/javase/11-0-1-relnotes-5032023.html
|
||||
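// Note: the system property above is typically supplied to the JVM running Elasticsearch,
// e.g. with a line in config/jvm.options such as
// -Dcom.sun.jndi.ldap.object.disableEndpointIdentification=true
// or via the ES_JAVA_OPTS environment variable when the node is started.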
}
|
||||
}
|
||||
|
||||
if (this.sslConfig.getSupportedCipherSuites() != null && this.sslConfig.getSupportedCipherSuites().length > 0) {
|
||||
ldaptiveSslConfig.setEnabledCipherSuites(this.sslConfig.getSupportedCipherSuites());
|
||||
}
|
||||
|
||||
ldaptiveSslConfig.setEnabledProtocols(this.sslConfig.getSupportedProtocols());
|
||||
|
||||
if (this.sslConfig.isTrustAllEnabled()) {
|
||||
ldaptiveSslConfig.setTrustManagers(new AllowAnyTrustManager());
|
||||
}
|
||||
|
||||
config.setSslConfig(ldaptiveSslConfig);
|
||||
|
||||
config.setUseSSL(true);
|
||||
config.setUseStartTLS(this.sslConfig.isStartTlsEnabled());
|
||||
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private void configureSSLinConnectionFactory(DefaultConnectionFactory connectionFactory) {
|
||||
if (this.sslConfig == null) {
|
||||
// Disabled
|
||||
return;
|
||||
}
|
||||
|
||||
Map<String, Object> props = new HashMap<String, Object>();
|
||||
|
||||
if (this.sslConfig.isStartTlsEnabled() && !this.sslConfig.isHostnameVerificationEnabled()) {
|
||||
props.put("jndi.starttls.allowAnyHostname", "true");
|
||||
}
|
||||
|
||||
connectionFactory.getProvider().getProviderConfig().setProperties(props);
|
||||
|
||||
}
|
||||
}
|
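A minimal usage sketch for the factory above (illustrative only; "settings" and "configPath" stand for the LDAP backend's Settings object and the node's config directory, and checked exceptions such as SSLConfigException and LdapException are not handled here):

// Pooling is optional: createConnectionPool() returns null when the pool is disabled,
// in which case createConnectionFactory() falls back to a plain DefaultConnectionFactory.
LDAPConnectionFactoryFactory factoryFactory = new LDAPConnectionFactoryFactory(settings, configPath);
ConnectionPool pool = factoryFactory.createConnectionPool();
ConnectionFactory connectionFactory = factoryFactory.createConnectionFactory(pool);
Connection connection = connectionFactory.getConnection();
try {
    connection.open();
    // ... run org.ldaptive bind/search operations against the connection
} finally {
    connection.close();
}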
146
src/main/java/com/amazon/dlic/auth/ldap2/LDAPUserSearcher.java
Normal file
@ -0,0 +1,146 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.ldap2;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.commons.lang3.tuple.Pair;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.ldaptive.Connection;
|
||||
import org.ldaptive.LdapEntry;
|
||||
import org.ldaptive.SearchScope;
|
||||
|
||||
import com.amazon.dlic.auth.ldap.util.ConfigConstants;
|
||||
import com.amazon.dlic.auth.ldap.util.LdapHelper;
|
||||
import com.amazon.dlic.auth.ldap.util.Utils;
|
||||
|
||||
public class LDAPUserSearcher {
|
||||
protected static final Logger log = LogManager.getLogger(LDAPUserSearcher.class);
|
||||
|
||||
private static final String ZERO_PLACEHOLDER = "{0}";
|
||||
private static final String DEFAULT_USERBASE = "";
|
||||
private static final String DEFAULT_USERSEARCH_PATTERN = "(sAMAccountName={0})";
|
||||
|
||||
private final Settings settings;
|
||||
private final List<Map.Entry<String, Settings>> userBaseSettings;
|
||||
|
||||
public LDAPUserSearcher(Settings settings) {
|
||||
this.settings = settings;
|
||||
this.userBaseSettings = getUserBaseSettings(settings);
|
||||
}
|
||||
|
||||
static List<Map.Entry<String, Settings>> getUserBaseSettings(Settings settings) {
|
||||
Map<String, Settings> userBaseSettingsMap = new HashMap<>(
|
||||
settings.getGroups(ConfigConstants.LDAP_AUTHCZ_USERS));
|
||||
|
||||
if (!userBaseSettingsMap.isEmpty()) {
|
||||
if (settings.hasValue(ConfigConstants.LDAP_AUTHC_USERBASE)) {
|
||||
throw new RuntimeException(
|
||||
"Both old-style and new-style configuration defined for LDAP authentication backend: "
|
||||
+ settings);
|
||||
}
|
||||
|
||||
return Utils.getOrderedBaseSettings(userBaseSettingsMap);
|
||||
} else {
|
||||
Settings.Builder settingsBuilder = Settings.builder();
|
||||
settingsBuilder.put(ConfigConstants.LDAP_AUTHCZ_BASE,
|
||||
settings.get(ConfigConstants.LDAP_AUTHC_USERBASE, DEFAULT_USERBASE));
|
||||
settingsBuilder.put(ConfigConstants.LDAP_AUTHCZ_SEARCH,
|
||||
settings.get(ConfigConstants.LDAP_AUTHC_USERSEARCH, DEFAULT_USERSEARCH_PATTERN));
|
||||
|
||||
return Collections.singletonList(Pair.of("_legacyConfig", settingsBuilder.build()));
|
||||
}
|
||||
}
|
||||
|
||||
LdapEntry exists(Connection ldapConnection, String user) throws Exception {
|
||||
|
||||
if (settings.getAsBoolean(ConfigConstants.LDAP_FAKE_LOGIN_ENABLED, false)
|
||||
|| settings.getAsBoolean(ConfigConstants.LDAP_SEARCH_ALL_BASES, false)
|
||||
|| settings.hasValue(ConfigConstants.LDAP_AUTHC_USERBASE)) {
|
||||
return existsSearchingAllBases(ldapConnection, user);
|
||||
} else {
|
||||
return existsSearchingUntilFirstHit(ldapConnection, user);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private LdapEntry existsSearchingUntilFirstHit(Connection ldapConnection, String user) throws Exception {
|
||||
final String username = Utils.escapeStringRfc2254(user);
|
||||
|
||||
for (Map.Entry<String, Settings> entry : userBaseSettings) {
|
||||
Settings baseSettings = entry.getValue();
|
||||
|
||||
List<LdapEntry> result = LdapHelper.search(ldapConnection,
|
||||
baseSettings.get(ConfigConstants.LDAP_AUTHCZ_BASE, DEFAULT_USERBASE),
|
||||
baseSettings.get(ConfigConstants.LDAP_AUTHCZ_SEARCH, DEFAULT_USERSEARCH_PATTERN)
|
||||
.replace(ZERO_PLACEHOLDER, username),
|
||||
SearchScope.SUBTREE);
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Results for LDAP search for " + user + " in base " + entry.getKey() + ":\n" + result);
|
||||
}
|
||||
|
||||
if (result != null && result.size() >= 1) {
|
||||
return result.get(0);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private LdapEntry existsSearchingAllBases(Connection ldapConnection, String user) throws Exception {
|
||||
final String username = Utils.escapeStringRfc2254(user);
|
||||
Set<LdapEntry> result = new HashSet<>();
|
||||
|
||||
for (Map.Entry<String, Settings> entry : userBaseSettings) {
|
||||
Settings baseSettings = entry.getValue();
|
||||
|
||||
List<LdapEntry> foundEntries = LdapHelper.search(ldapConnection,
|
||||
baseSettings.get(ConfigConstants.LDAP_AUTHCZ_BASE, DEFAULT_USERBASE),
|
||||
baseSettings.get(ConfigConstants.LDAP_AUTHCZ_SEARCH, DEFAULT_USERSEARCH_PATTERN)
|
||||
.replace(ZERO_PLACEHOLDER, username),
|
||||
SearchScope.SUBTREE);
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Results for LDAP search for " + user + " in base " + entry.getKey() + ":\n" + result);
|
||||
}
|
||||
|
||||
if (foundEntries != null) {
|
||||
result.addAll(foundEntries);
|
||||
}
|
||||
}
|
||||
|
||||
if (result.isEmpty()) {
|
||||
log.debug("No user " + username + " found");
|
||||
return null;
|
||||
}
|
||||
|
||||
if (result.size() > 1) {
|
||||
log.debug("More than one user for '" + username + "' found");
|
||||
return null;
|
||||
}
|
||||
|
||||
return result.iterator().next();
|
||||
}
|
||||
|
||||
}
|
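A minimal usage sketch for the searcher above (illustrative only; exists() is package-visible, so callers such as the LDAP authentication backend live in the same package, and "ldapConnection" stands for an already opened org.ldaptive Connection):

LDAPUserSearcher userSearcher = new LDAPUserSearcher(settings);
// Resolves the login name to an LdapEntry; returns null when no (unique) match is found
LdapEntry entry = userSearcher.exists(ldapConnection, "jdoe");
if (entry != null) {
    String userDn = entry.getDn();
}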
88
src/main/java/com/amazon/dlic/auth/ldap2/MakeJava9Happy.java
Normal file
@ -0,0 +1,88 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.ldap2;
|
||||
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedActionException;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.ldaptive.ssl.ThreadLocalTLSSocketFactory;
|
||||
|
||||
import io.netty.util.internal.PlatformDependent;
|
||||
|
||||
public class MakeJava9Happy {
|
||||
|
||||
private static ClassLoader classLoader;
|
||||
private static boolean isJava9OrHigher = PlatformDependent.javaVersion() >= 9;
|
||||
|
||||
static ClassLoader getClassLoader() {
|
||||
if (!isJava9OrHigher) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (classLoader == null) {
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
try {
|
||||
return AccessController.doPrivileged(new PrivilegedExceptionAction<ClassLoader>() {
|
||||
@Override
|
||||
public ClassLoader run() throws Exception {
|
||||
return new Java9CL();
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
if (e.getException() instanceof RuntimeException) {
|
||||
throw (RuntimeException) e.getException();
|
||||
} else {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return classLoader;
|
||||
}
|
||||
|
||||
@SuppressWarnings("rawtypes")
|
||||
private final static Class threadLocalTLSSocketFactoryClass = ThreadLocalTLSSocketFactory.class;
|
||||
|
||||
private final static class Java9CL extends ClassLoader {
|
||||
|
||||
public Java9CL() {
|
||||
super();
|
||||
}
|
||||
|
||||
@SuppressWarnings("unused")
|
||||
public Java9CL(ClassLoader parent) {
|
||||
super(parent);
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "rawtypes", "unchecked" })
|
||||
@Override
|
||||
public Class loadClass(String name) throws ClassNotFoundException {
|
||||
|
||||
if (!name.equalsIgnoreCase("org.ldaptive.ssl.ThreadLocalTLSSocketFactory")) {
|
||||
return super.loadClass(name);
|
||||
}
|
||||
|
||||
return threadLocalTLSSocketFactoryClass;
|
||||
}
|
||||
}
|
||||
}
|
207
src/main/java/com/amazon/dlic/auth/ldap2/PrivilegedProvider.java
Normal file
@ -0,0 +1,207 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.ldap2;
|
||||
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedActionException;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.ldaptive.AddRequest;
|
||||
import org.ldaptive.BindRequest;
|
||||
import org.ldaptive.CompareRequest;
|
||||
import org.ldaptive.ConnectionConfig;
|
||||
import org.ldaptive.DeleteRequest;
|
||||
import org.ldaptive.LdapException;
|
||||
import org.ldaptive.ModifyDnRequest;
|
||||
import org.ldaptive.ModifyRequest;
|
||||
import org.ldaptive.Response;
|
||||
import org.ldaptive.SearchRequest;
|
||||
import org.ldaptive.control.RequestControl;
|
||||
import org.ldaptive.extended.ExtendedRequest;
|
||||
import org.ldaptive.extended.UnsolicitedNotificationListener;
|
||||
import org.ldaptive.provider.Provider;
|
||||
import org.ldaptive.provider.ProviderConnection;
|
||||
import org.ldaptive.provider.ProviderConnectionFactory;
|
||||
import org.ldaptive.provider.SearchIterator;
|
||||
import org.ldaptive.provider.SearchListener;
|
||||
import org.ldaptive.provider.jndi.JndiProviderConfig;
|
||||
|
||||
public class PrivilegedProvider implements Provider<JndiProviderConfig> {
|
||||
|
||||
private final Provider<JndiProviderConfig> delegate;
|
||||
|
||||
public PrivilegedProvider(Provider<JndiProviderConfig> delegate) {
|
||||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public JndiProviderConfig getProviderConfig() {
|
||||
return this.delegate.getProviderConfig();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setProviderConfig(JndiProviderConfig pc) {
|
||||
this.delegate.setProviderConfig(pc);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ProviderConnectionFactory<JndiProviderConfig> getConnectionFactory(ConnectionConfig cc) {
|
||||
ProviderConnectionFactory<JndiProviderConfig> connectionFactory = delegate.getConnectionFactory(cc);
|
||||
|
||||
return new PrivilegedProviderConnectionFactory(connectionFactory);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Provider<JndiProviderConfig> newInstance() {
|
||||
return new PrivilegedProvider(this.delegate.newInstance());
|
||||
}
|
||||
|
||||
private static class PrivilegedProviderConnectionFactory implements ProviderConnectionFactory<JndiProviderConfig> {
|
||||
|
||||
private final ProviderConnectionFactory<JndiProviderConfig> delegate;
|
||||
|
||||
PrivilegedProviderConnectionFactory(ProviderConnectionFactory<JndiProviderConfig> delegate) {
|
||||
this.delegate = delegate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public JndiProviderConfig getProviderConfig() {
|
||||
return this.delegate.getProviderConfig();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ProviderConnection create() throws LdapException {
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
try {
|
||||
return AccessController.doPrivileged(new PrivilegedExceptionAction<ProviderConnection>() {
|
||||
@Override
|
||||
public ProviderConnection run() throws Exception {
|
||||
return new PrivilegedProviderConnection(delegate.create(), getProviderConfig());
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
if (e.getException() instanceof LdapException) {
|
||||
throw (LdapException) e.getException();
|
||||
} else if (e.getException() instanceof RuntimeException) {
|
||||
throw (RuntimeException) e.getException();
|
||||
} else {
|
||||
throw new RuntimeException(e.getException());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static class PrivilegedProviderConnection implements ProviderConnection {
|
||||
private final ProviderConnection delegate;
|
||||
private final JndiProviderConfig jndiProviderConfig;
|
||||
|
||||
public PrivilegedProviderConnection(ProviderConnection delegate, JndiProviderConfig jndiProviderConfig) {
|
||||
this.delegate = delegate;
|
||||
this.jndiProviderConfig = jndiProviderConfig;
|
||||
}
|
||||
|
||||
public Response<Void> bind(BindRequest request) throws LdapException {
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
try {
|
||||
return AccessController.doPrivileged(new PrivilegedExceptionAction<Response<Void>>() {
|
||||
@Override
|
||||
public Response<Void> run() throws Exception {
|
||||
if (jndiProviderConfig.getClassLoader() != null) {
|
||||
ClassLoader originalClassLoader = Thread.currentThread().getContextClassLoader();
|
||||
|
||||
try {
|
||||
Thread.currentThread().setContextClassLoader(jndiProviderConfig.getClassLoader());
|
||||
return delegate.bind(request);
|
||||
} finally {
|
||||
Thread.currentThread().setContextClassLoader(originalClassLoader);
|
||||
}
|
||||
} else {
|
||||
return delegate.bind(request);
|
||||
}
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
if (e.getException() instanceof LdapException) {
|
||||
throw (LdapException) e.getException();
|
||||
} else if (e.getException() instanceof RuntimeException) {
|
||||
throw (RuntimeException) e.getException();
|
||||
} else {
|
||||
throw new RuntimeException(e.getException());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Response<Void> add(AddRequest request) throws LdapException {
|
||||
return delegate.add(request);
|
||||
}
|
||||
|
||||
public Response<Boolean> compare(CompareRequest request) throws LdapException {
|
||||
return delegate.compare(request);
|
||||
}
|
||||
|
||||
public Response<Void> delete(DeleteRequest request) throws LdapException {
|
||||
return delegate.delete(request);
|
||||
}
|
||||
|
||||
public Response<Void> modify(ModifyRequest request) throws LdapException {
|
||||
return delegate.modify(request);
|
||||
}
|
||||
|
||||
public Response<Void> modifyDn(ModifyDnRequest request) throws LdapException {
|
||||
return delegate.modifyDn(request);
|
||||
}
|
||||
|
||||
public SearchIterator search(SearchRequest request) throws LdapException {
|
||||
return delegate.search(request);
|
||||
}
|
||||
|
||||
public void searchAsync(SearchRequest request, SearchListener listener) throws LdapException {
|
||||
delegate.searchAsync(request, listener);
|
||||
}
|
||||
|
||||
public void abandon(int messageId, RequestControl[] controls) throws LdapException {
|
||||
delegate.abandon(messageId, controls);
|
||||
}
|
||||
|
||||
public Response<?> extendedOperation(ExtendedRequest request) throws LdapException {
|
||||
return delegate.extendedOperation(request);
|
||||
}
|
||||
|
||||
public void addUnsolicitedNotificationListener(UnsolicitedNotificationListener listener) {
|
||||
delegate.addUnsolicitedNotificationListener(listener);
|
||||
}
|
||||
|
||||
public void removeUnsolicitedNotificationListener(UnsolicitedNotificationListener listener) {
|
||||
delegate.removeUnsolicitedNotificationListener(listener);
|
||||
}
|
||||
|
||||
public void close(RequestControl[] controls) throws LdapException {
|
||||
delegate.close(controls);
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,565 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.util;
|
||||
|
||||
import java.net.Socket;
|
||||
import java.nio.file.Path;
|
||||
import java.security.KeyManagementException;
|
||||
import java.security.KeyStore;
|
||||
import java.security.KeyStoreException;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.security.PrivateKey;
|
||||
import java.security.SecureRandom;
|
||||
import java.security.UnrecoverableKeyException;
|
||||
import java.security.cert.CertificateException;
|
||||
import java.security.cert.X509Certificate;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import javax.net.ssl.HostnameVerifier;
|
||||
import javax.net.ssl.KeyManager;
|
||||
import javax.net.ssl.SSLContext;
|
||||
import javax.net.ssl.TrustManager;
|
||||
import javax.net.ssl.X509TrustManager;
|
||||
|
||||
import org.apache.http.conn.ssl.DefaultHostnameVerifier;
|
||||
import org.apache.http.conn.ssl.NoopHostnameVerifier;
|
||||
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
|
||||
import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy;
|
||||
import org.apache.http.ssl.PrivateKeyDetails;
|
||||
import org.apache.http.ssl.PrivateKeyStrategy;
|
||||
import org.apache.http.ssl.SSLContextBuilder;
|
||||
import org.apache.http.ssl.SSLContexts;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.util.SSLConfigConstants;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.PemKeyReader;
|
||||
import com.google.common.collect.ImmutableList;
|
||||
|
||||
public class SettingsBasedSSLConfigurator {
|
||||
private static final Logger log = LogManager.getLogger(SettingsBasedSSLConfigurator.class);
|
||||
|
||||
public static final String CERT_ALIAS = "cert_alias";
|
||||
public static final String CA_ALIAS = "ca_alias";
|
||||
public static final String ENABLE_SSL = "enable_ssl";
|
||||
|
||||
/**
|
||||
* Shall STARTTLS be used?
|
||||
* <p>
|
||||
* NOTE: The setting of this option is only reflected by the startTlsEnabled
|
||||
* attribute of the returned SSLConfig object. Clients of this class need to
|
||||
* take further measures to enable STARTTLS. It does not affect the
|
||||
* SSLIOSessionStrategy and SSLConnectionSocketFactory objects returned from
|
||||
* this class.
|
||||
*/
|
||||
public static final String ENABLE_START_TLS = "enable_start_tls";
|
||||
public static final String ENABLE_SSL_CLIENT_AUTH = "enable_ssl_client_auth";
|
||||
public static final String PEMKEY_FILEPATH = "pemkey_filepath";
|
||||
public static final String PEMKEY_CONTENT = "pemkey_content";
|
||||
public static final String PEMKEY_PASSWORD = "pemkey_password";
|
||||
public static final String PEMCERT_FILEPATH = "pemcert_filepath";
|
||||
public static final String PEMCERT_CONTENT = "pemcert_content";
|
||||
public static final String PEMTRUSTEDCAS_CONTENT = "pemtrustedcas_content";
|
||||
public static final String PEMTRUSTEDCAS_FILEPATH = "pemtrustedcas_filepath";
|
||||
public static final String VERIFY_HOSTNAMES = "verify_hostnames";
|
||||
public static final String TRUST_ALL = "trust_all";
|
||||
|
||||
private static final List<String> DEFAULT_TLS_PROTOCOLS = ImmutableList.of("TLSv1.2", "TLSv1.1");
|
||||
|
||||
private SSLContextBuilder sslContextBuilder;
|
||||
private final Settings settings;
|
||||
private final String settingsKeyPrefix;
|
||||
private final Path configPath;
|
||||
private final String clientName;
|
||||
|
||||
private boolean enabled;
|
||||
private boolean enableSslClientAuth;
|
||||
private KeyStore effectiveTruststore;
|
||||
private KeyStore effectiveKeystore;
|
||||
private char[] effectiveKeyPassword;
|
||||
private String effectiveKeyAlias;
|
||||
private List<String> effectiveTruststoreAliases;
|
||||
|
||||
public SettingsBasedSSLConfigurator(Settings settings, Path configPath, String settingsKeyPrefix,
|
||||
String clientName) {
|
||||
this.settings = settings;
|
||||
this.configPath = configPath;
|
||||
this.settingsKeyPrefix = normalizeSettingsKeyPrefix(settingsKeyPrefix);
|
||||
this.clientName = clientName != null ? clientName : this.settingsKeyPrefix;
|
||||
}
|
||||
|
||||
public SettingsBasedSSLConfigurator(Settings settings, Path configPath, String settingsKeyPrefix) {
|
||||
this(settings, configPath, settingsKeyPrefix, null);
|
||||
}
|
||||
|
||||
SSLContext buildSSLContext() throws SSLConfigException {
|
||||
try {
|
||||
if (isTrustAllEnabled()) {
|
||||
sslContextBuilder = new OverlyTrustfulSSLContextBuilder();
|
||||
} else {
|
||||
sslContextBuilder = SSLContexts.custom();
|
||||
}
|
||||
|
||||
configureWithSettings();
|
||||
|
||||
if (!this.enabled) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return sslContextBuilder.build();
|
||||
|
||||
} catch (NoSuchAlgorithmException | KeyStoreException | KeyManagementException e) {
|
||||
throw new SSLConfigException("Error while initializing SSL configuration for " + this.clientName, e);
|
||||
}
|
||||
}
|
||||
|
||||
public SSLConfig buildSSLConfig() throws SSLConfigException {
|
||||
SSLContext sslContext = buildSSLContext();
|
||||
|
||||
if (sslContext == null) {
|
||||
// disabled
|
||||
return null;
|
||||
}
|
||||
|
||||
return new SSLConfig(sslContext, getSupportedProtocols(), getSupportedCipherSuites(), getHostnameVerifier(),
|
||||
isHostnameVerificationEnabled(), isTrustAllEnabled(), isStartTlsEnabled(), this.effectiveTruststore,
|
||||
this.effectiveTruststoreAliases, this.effectiveKeystore, this.effectiveKeyPassword,
|
||||
this.effectiveKeyAlias);
|
||||
}
|
||||
|
||||
private boolean isHostnameVerificationEnabled() {
|
||||
return getSettingAsBoolean(VERIFY_HOSTNAMES, true) && !isTrustAllEnabled();
|
||||
}
|
||||
|
||||
private HostnameVerifier getHostnameVerifier() {
|
||||
if (isHostnameVerificationEnabled()) {
|
||||
return new DefaultHostnameVerifier();
|
||||
} else {
|
||||
return NoopHostnameVerifier.INSTANCE;
|
||||
}
|
||||
}
|
||||
|
||||
private String[] getSupportedProtocols() {
|
||||
return getSettingAsArray("enabled_ssl_protocols", DEFAULT_TLS_PROTOCOLS);
|
||||
}
|
||||
|
||||
private String[] getSupportedCipherSuites() {
|
||||
return getSettingAsArray("enabled_ssl_ciphers", null);
|
||||
|
||||
}
|
||||
|
||||
private boolean isStartTlsEnabled() {
|
||||
return getSettingAsBoolean(ENABLE_START_TLS, false);
|
||||
}
|
||||
|
||||
private boolean isTrustAllEnabled() {
|
||||
return getSettingAsBoolean(TRUST_ALL, false);
|
||||
}
|
||||
|
||||
private void configureWithSettings() throws SSLConfigException, NoSuchAlgorithmException, KeyStoreException {
|
||||
this.enabled = getSettingAsBoolean(ENABLE_SSL, false);
|
||||
|
||||
if (!this.enabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.enableSslClientAuth = getSettingAsBoolean(ENABLE_SSL_CLIENT_AUTH, false);
|
||||
|
||||
if (settings.get(settingsKeyPrefix + PEMTRUSTEDCAS_FILEPATH, null) != null
|
||||
|| settings.get(settingsKeyPrefix + PEMTRUSTEDCAS_CONTENT, null) != null) {
|
||||
initFromPem();
|
||||
} else {
|
||||
initFromKeyStore();
|
||||
}
|
||||
|
||||
if (effectiveTruststore != null) {
|
||||
sslContextBuilder.loadTrustMaterial(effectiveTruststore, null);
|
||||
}
|
||||
|
||||
if (enableSslClientAuth) {
|
||||
if (effectiveKeystore != null) {
|
||||
try {
|
||||
sslContextBuilder.loadKeyMaterial(effectiveKeystore, effectiveKeyPassword,
|
||||
new PrivateKeyStrategy() {
|
||||
|
||||
@Override
|
||||
public String chooseAlias(Map<String, PrivateKeyDetails> aliases, Socket socket) {
|
||||
if (aliases == null || aliases.isEmpty()) {
|
||||
return effectiveKeyAlias;
|
||||
}
|
||||
|
||||
if (effectiveKeyAlias == null || effectiveKeyAlias.isEmpty()) {
|
||||
return aliases.keySet().iterator().next();
|
||||
}
|
||||
|
||||
return effectiveKeyAlias;
|
||||
}
|
||||
});
|
||||
} catch (UnrecoverableKeyException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void initFromPem() throws SSLConfigException {
|
||||
X509Certificate[] trustCertificates;
|
||||
|
||||
try {
|
||||
trustCertificates = PemKeyReader.loadCertificatesFromStream(
|
||||
PemKeyReader.resolveStream(settingsKeyPrefix + PEMTRUSTEDCAS_CONTENT, settings));
|
||||
} catch (Exception e) {
|
||||
throw new SSLConfigException(
|
||||
"Error loading PEM from " + settingsKeyPrefix + PEMTRUSTEDCAS_CONTENT + " for " + this.clientName,
|
||||
e);
|
||||
}
|
||||
|
||||
if (trustCertificates == null) {
|
||||
String path = PemKeyReader.resolve(settingsKeyPrefix + PEMTRUSTEDCAS_FILEPATH, settings, configPath,
|
||||
!isTrustAllEnabled());
|
||||
|
||||
try {
|
||||
trustCertificates = PemKeyReader.loadCertificatesFromFile(path);
|
||||
} catch (Exception e) {
|
||||
throw new SSLConfigException("Error loading PEM from " + path + " (" + settingsKeyPrefix
|
||||
+ PEMTRUSTEDCAS_FILEPATH + ") for " + this.clientName, e);
|
||||
}
|
||||
}
|
||||
|
||||
// for client authentication
|
||||
X509Certificate[] authenticationCertificate;
|
||||
|
||||
try {
|
||||
authenticationCertificate = PemKeyReader.loadCertificatesFromStream(
|
||||
PemKeyReader.resolveStream(settingsKeyPrefix + PEMCERT_CONTENT, settings));
|
||||
} catch (Exception e) {
|
||||
throw new SSLConfigException(
|
||||
"Error loading PEM from " + settingsKeyPrefix + PEMCERT_CONTENT + " for " + this.clientName, e);
|
||||
}
|
||||
|
||||
if (authenticationCertificate == null) {
|
||||
String path = PemKeyReader.resolve(settingsKeyPrefix + PEMCERT_FILEPATH, settings, configPath,
|
||||
enableSslClientAuth);
|
||||
|
||||
try {
|
||||
authenticationCertificate = PemKeyReader.loadCertificatesFromFile(path);
|
||||
} catch (Exception e) {
|
||||
throw new SSLConfigException("Error loading PEM from " + path + " (" + settingsKeyPrefix
|
||||
+ PEMCERT_FILEPATH + ") for " + this.clientName, e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
PrivateKey authenticationKey;
|
||||
|
||||
try {
|
||||
authenticationKey = PemKeyReader.loadKeyFromStream(getSetting(PEMKEY_PASSWORD),
|
||||
PemKeyReader.resolveStream(settingsKeyPrefix + PEMKEY_CONTENT, settings));
|
||||
} catch (Exception e) {
|
||||
throw new SSLConfigException(
|
||||
"Error loading PEM from " + settingsKeyPrefix + PEMKEY_CONTENT + " for " + this.clientName, e);
|
||||
}
|
||||
|
||||
if (authenticationKey == null) {
|
||||
String path = PemKeyReader.resolve(settingsKeyPrefix + PEMKEY_FILEPATH, settings, configPath,
|
||||
enableSslClientAuth);
|
||||
|
||||
try {
|
||||
authenticationKey = PemKeyReader.loadKeyFromFile(getSetting(PEMKEY_PASSWORD), path);
|
||||
} catch (Exception e) {
|
||||
throw new SSLConfigException("Error loading PEM from " + path + " (" + settingsKeyPrefix
|
||||
+ PEMKEY_FILEPATH + ") for " + this.clientName, e);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
effectiveKeyPassword = PemKeyReader.randomChars(12);
|
||||
effectiveKeyAlias = "al";
|
||||
effectiveTruststore = PemKeyReader.toTruststore(effectiveKeyAlias, trustCertificates);
|
||||
effectiveKeystore = PemKeyReader.toKeystore(effectiveKeyAlias, effectiveKeyPassword,
|
||||
authenticationCertificate, authenticationKey);
|
||||
} catch (Exception e) {
|
||||
throw new SSLConfigException("Error initializing SSLConfig for " + this.clientName, e);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void initFromKeyStore() throws SSLConfigException {
|
||||
KeyStore trustStore;
|
||||
KeyStore keyStore;
|
||||
|
||||
try {
|
||||
trustStore = PemKeyReader.loadKeyStore(
|
||||
PemKeyReader.resolve(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_TRUSTSTORE_FILEPATH, settings,
|
||||
configPath, !isTrustAllEnabled()),
|
||||
settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_TRUSTSTORE_PASSWORD,
|
||||
SSLConfigConstants.DEFAULT_STORE_PASSWORD),
|
||||
settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_TRUSTSTORE_TYPE));
|
||||
} catch (Exception e) {
|
||||
throw new SSLConfigException("Error loading trust store from "
|
||||
+ settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_TRUSTSTORE_FILEPATH), e);
|
||||
}
|
||||
|
||||
effectiveTruststoreAliases = getSettingAsList(CA_ALIAS, null);
|
||||
|
||||
// for client authentication
|
||||
|
||||
try {
|
||||
keyStore = PemKeyReader.loadKeyStore(
|
||||
PemKeyReader.resolve(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_KEYSTORE_FILEPATH, settings,
|
||||
configPath, enableSslClientAuth),
|
||||
settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_KEYSTORE_PASSWORD,
|
||||
SSLConfigConstants.DEFAULT_STORE_PASSWORD),
|
||||
settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_KEYSTORE_TYPE));
|
||||
} catch (Exception e) {
|
||||
throw new SSLConfigException("Error loading key store from "
|
||||
+ settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_KEYSTORE_FILEPATH), e);
|
||||
}
|
||||
|
||||
String keyStorePassword = settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_KEYSTORE_PASSWORD,
|
||||
SSLConfigConstants.DEFAULT_STORE_PASSWORD);
|
||||
effectiveKeyPassword = keyStorePassword == null || keyStorePassword.isEmpty() ? null
|
||||
: keyStorePassword.toCharArray();
|
||||
effectiveKeyAlias = getSetting(CERT_ALIAS);
|
||||
|
||||
if (enableSslClientAuth && effectiveKeyAlias == null) {
|
||||
throw new IllegalArgumentException(settingsKeyPrefix + CERT_ALIAS + " not given");
|
||||
}
|
||||
|
||||
effectiveTruststore = trustStore;
|
||||
effectiveKeystore = keyStore;
|
||||
|
||||
}
|
||||
|
||||
private String getSetting(String key) {
|
||||
return settings.get(settingsKeyPrefix + key);
|
||||
}
|
||||
|
||||
private Boolean getSettingAsBoolean(String key, Boolean defaultValue) {
|
||||
return settings.getAsBoolean(settingsKeyPrefix + key, defaultValue);
|
||||
}
|
||||
|
||||
private List<String> getSettingAsList(String key, List<String> defaultValue) {
|
||||
return settings.getAsList(settingsKeyPrefix + key, defaultValue);
|
||||
}
|
||||
|
||||
private String[] getSettingAsArray(String key, List<String> defaultValue) {
|
||||
List<String> list = getSettingAsList(key, defaultValue);
|
||||
|
||||
if (list == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return list.toArray(new String[list.size()]);
|
||||
}
|
||||
|
||||
private static String normalizeSettingsKeyPrefix(String settingsKeyPrefix) {
|
||||
if (settingsKeyPrefix == null || settingsKeyPrefix.length() == 0) {
|
||||
return "";
|
||||
} else if (!settingsKeyPrefix.endsWith(".")) {
|
||||
return settingsKeyPrefix + ".";
|
||||
} else {
|
||||
return settingsKeyPrefix;
|
||||
}
|
||||
}
|
||||
|
||||
public static class SSLConfig {
|
||||
|
||||
private final SSLContext sslContext;
|
||||
private final String[] supportedProtocols;
|
||||
private final String[] supportedCipherSuites;
|
||||
private final HostnameVerifier hostnameVerifier;
|
||||
private final boolean startTlsEnabled;
|
||||
private final boolean hostnameVerificationEnabled;
|
||||
private final boolean trustAll;
|
||||
private final KeyStore effectiveTruststore;
|
||||
private final List<String> effectiveTruststoreAliases;
|
||||
private final KeyStore effectiveKeystore;
|
||||
private final char[] effectiveKeyPassword;
|
||||
private final String effectiveKeyAlias;
|
||||
|
||||
public SSLConfig(SSLContext sslContext, String[] supportedProtocols, String[] supportedCipherSuites,
|
||||
HostnameVerifier hostnameVerifier, boolean hostnameVerificationEnabled, boolean trustAll,
|
||||
boolean startTlsEnabled, KeyStore effectiveTruststore, List<String> effectiveTruststoreAliases,
|
||||
KeyStore effectiveKeystore, char[] effectiveKeyPassword, String effectiveKeyAlias) {
|
||||
this.sslContext = sslContext;
|
||||
this.supportedProtocols = supportedProtocols;
|
||||
this.supportedCipherSuites = supportedCipherSuites;
|
||||
this.hostnameVerifier = hostnameVerifier;
|
||||
this.hostnameVerificationEnabled = hostnameVerificationEnabled;
|
||||
this.trustAll = trustAll;
|
||||
this.startTlsEnabled = startTlsEnabled;
|
||||
this.effectiveTruststore = effectiveTruststore;
|
||||
this.effectiveTruststoreAliases = effectiveTruststoreAliases;
|
||||
this.effectiveKeystore = effectiveKeystore;
|
||||
this.effectiveKeyPassword = effectiveKeyPassword;
|
||||
this.effectiveKeyAlias = effectiveKeyAlias;
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Created SSLConfig: " + this);
|
||||
}
|
||||
}
|
||||
|
||||
public SSLContext getSslContext() {
|
||||
return sslContext;
|
||||
}
|
||||
|
||||
public String[] getSupportedProtocols() {
|
||||
return supportedProtocols;
|
||||
}
|
||||
|
||||
public String[] getSupportedCipherSuites() {
|
||||
return supportedCipherSuites;
|
||||
}
|
||||
|
||||
public HostnameVerifier getHostnameVerifier() {
|
||||
return hostnameVerifier;
|
||||
}
|
||||
|
||||
public SSLIOSessionStrategy toSSLIOSessionStrategy() {
|
||||
return new SSLIOSessionStrategy(sslContext, supportedProtocols, supportedCipherSuites, hostnameVerifier);
|
||||
}
|
||||
|
||||
public SSLConnectionSocketFactory toSSLConnectionSocketFactory() {
|
||||
return new SSLConnectionSocketFactory(sslContext, supportedProtocols, supportedCipherSuites,
|
||||
hostnameVerifier);
|
||||
}
|
||||
|
||||
public boolean isStartTlsEnabled() {
|
||||
return startTlsEnabled;
|
||||
}
|
||||
|
||||
public boolean isHostnameVerificationEnabled() {
|
||||
return hostnameVerificationEnabled;
|
||||
}
|
||||
|
||||
public KeyStore getEffectiveTruststore() {
|
||||
return effectiveTruststore;
|
||||
}
|
||||
|
||||
public KeyStore getEffectiveKeystore() {
|
||||
return effectiveKeystore;
|
||||
}
|
||||
|
||||
public char[] getEffectiveKeyPassword() {
|
||||
return effectiveKeyPassword;
|
||||
}
|
||||
|
||||
public String getEffectiveKeyPasswordString() {
|
||||
if (this.effectiveKeyPassword == null) {
|
||||
return null;
|
||||
} else {
|
||||
return new String(this.effectiveKeyPassword);
|
||||
}
|
||||
}
|
||||
|
||||
public String getEffectiveKeyAlias() {
|
||||
return effectiveKeyAlias;
|
||||
}
|
||||
|
||||
public List<String> getEffectiveTruststoreAliases() {
|
||||
return effectiveTruststoreAliases;
|
||||
}
|
||||
|
||||
public String[] getEffectiveTruststoreAliasesArray() {
|
||||
if (this.effectiveTruststoreAliases == null) {
|
||||
return null;
|
||||
} else {
|
||||
return this.effectiveTruststoreAliases.toArray(new String[this.effectiveTruststoreAliases.size()]);
|
||||
}
|
||||
}
|
||||
|
||||
public String[] getEffectiveKeyAliasesArray() {
|
||||
if (this.effectiveKeyAlias == null) {
|
||||
return null;
|
||||
} else {
|
||||
return new String[] { this.effectiveKeyAlias };
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "SSLConfig [sslContext=" + sslContext + ", supportedProtocols=" + Arrays.toString(supportedProtocols)
|
||||
+ ", supportedCipherSuites=" + Arrays.toString(supportedCipherSuites) + ", hostnameVerifier="
|
||||
+ hostnameVerifier + ", startTlsEnabled=" + startTlsEnabled + ", hostnameVerificationEnabled="
|
||||
+ hostnameVerificationEnabled + ", trustAll=" + trustAll + ", effectiveTruststore="
|
||||
+ effectiveTruststore + ", effectiveTruststoreAliases=" + effectiveTruststoreAliases
|
||||
+ ", effectiveKeystore=" + effectiveKeystore + ", effectiveKeyAlias=" + effectiveKeyAlias + "]";
|
||||
}
|
||||
|
||||
public boolean isTrustAllEnabled() {
|
||||
return trustAll;
|
||||
}
|
||||
}
|
||||
|
||||
public static class SSLConfigException extends Exception {
|
||||
|
||||
private static final long serialVersionUID = 5827273100470174111L;
|
||||
|
||||
public SSLConfigException() {
|
||||
super();
|
||||
}
|
||||
|
||||
public SSLConfigException(String message, Throwable cause, boolean enableSuppression,
|
||||
boolean writableStackTrace) {
|
||||
super(message, cause, enableSuppression, writableStackTrace);
|
||||
}
|
||||
|
||||
public SSLConfigException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
|
||||
public SSLConfigException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
public SSLConfigException(Throwable cause) {
|
||||
super(cause);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static class OverlyTrustfulSSLContextBuilder extends SSLContextBuilder {
|
||||
@Override
|
||||
protected void initSSLContext(SSLContext sslContext, Collection<KeyManager> keyManagers,
|
||||
Collection<TrustManager> trustManagers, SecureRandom secureRandom) throws KeyManagementException {
|
||||
sslContext.init(!keyManagers.isEmpty() ? keyManagers.toArray(new KeyManager[keyManagers.size()]) : null,
|
||||
new TrustManager[] { new OverlyTrustfulTrustManager() }, secureRandom);
|
||||
}
|
||||
}
|
||||
|
||||
private static class OverlyTrustfulTrustManager implements X509TrustManager {
|
||||
@Override
|
||||
public void checkClientTrusted(final X509Certificate[] chain, final String authType)
|
||||
throws CertificateException {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void checkServerTrusted(final X509Certificate[] chain, final String authType)
|
||||
throws CertificateException {
|
||||
}
|
||||
|
||||
@Override
|
||||
public X509Certificate[] getAcceptedIssuers() {
|
||||
return new X509Certificate[0];
|
||||
}
|
||||
}
|
||||
}
|
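A minimal usage sketch for the configurator above (illustrative only; "my_prefix" is a placeholder for whatever settings prefix the calling client uses, and SSLConfigException handling is omitted):

// buildSSLConfig() returns null when enable_ssl is false under the given prefix
SettingsBasedSSLConfigurator.SSLConfig sslConfig =
        new SettingsBasedSSLConfigurator(settings, configPath, "my_prefix").buildSSLConfig();

if (sslConfig != null) {
    // For async Apache HTTP clients, e.g. HttpAsyncClientBuilder.setSSLStrategy(...)
    SSLIOSessionStrategy sessionStrategy = sslConfig.toSSLIOSessionStrategy();
    // For blocking Apache HTTP clients, e.g. HttpClientBuilder.setSSLSocketFactory(...)
    SSLConnectionSocketFactory socketFactory = sslConfig.toSSLConnectionSocketFactory();
}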
@ -0,0 +1,23 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
|
||||
public class DefaultObjectMapper {
|
||||
public static final ObjectMapper objectMapper = new ObjectMapper();
|
||||
}
|
||||
|
@ -0,0 +1,893 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.auditlog.impl;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.InetSocketAddress;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedAction;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.codec.digest.DigestUtils;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.elasticsearch.action.bulk.BulkRequest;
|
||||
import org.elasticsearch.action.bulk.BulkShardRequest;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.transport.TransportAddress;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.engine.Engine.Delete;
|
||||
import org.elasticsearch.index.engine.Engine.DeleteResult;
|
||||
import org.elasticsearch.index.engine.Engine.Index;
|
||||
import org.elasticsearch.index.engine.Engine.IndexResult;
|
||||
import org.elasticsearch.index.get.GetResult;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.transport.TransportRequest;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.DefaultObjectMapper;
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.impl.AuditMessage.Category;
|
||||
import com.amazon.opendistroforelasticsearch.security.compliance.ComplianceConfig;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.support.Utils;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.Base64Helper;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.OpenDistroSecurityDeprecationHandler;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.WildcardMatcher;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.User;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.flipkart.zjsonpatch.JsonDiff;
|
||||
import com.google.common.io.BaseEncoding;
|
||||
|
||||
public abstract class AbstractAuditLog implements AuditLog {
|
||||
|
||||
protected final Logger log = LogManager.getLogger(this.getClass());
|
||||
protected final ThreadPool threadPool;
|
||||
protected final IndexNameExpressionResolver resolver;
|
||||
protected final ClusterService clusterService;
|
||||
protected final Settings settings;
|
||||
protected final boolean restAuditingEnabled;
|
||||
protected final boolean transportAuditingEnabled;
|
||||
protected final boolean resolveBulkRequests;
|
||||
|
||||
protected final boolean logRequestBody;
|
||||
protected final boolean resolveIndices;
|
||||
|
||||
private List<String> ignoredAuditUsers;
|
||||
private List<String> ignoredComplianceUsersForRead;
|
||||
private List<String> ignoredComplianceUsersForWrite;
|
||||
private final List<String> ignoreAuditRequests;
|
||||
private final List<String> disabledRestCategories;
|
||||
private final List<String> disabledTransportCategories;
|
||||
private final List<String> defaultDisabledCategories = Arrays.asList(Category.AUTHENTICATED.toString(), Category.GRANTED_PRIVILEGES.toString());
|
||||
private final List<String> defaultIgnoredUsers = Arrays.asList("kibanaserver");
|
||||
private final boolean excludeSensitiveHeaders;
|
||||
|
||||
private final String opendistrosecurityIndex;
|
||||
private static final List<String> writeClasses = new ArrayList<>();
|
||||
|
||||
static {
|
||||
writeClasses.add(IndexRequest.class.getSimpleName());
|
||||
writeClasses.add(UpdateRequest.class.getSimpleName());
|
||||
writeClasses.add(BulkRequest.class.getSimpleName());
|
||||
writeClasses.add(BulkShardRequest.class.getSimpleName());
|
||||
writeClasses.add(DeleteRequest.class.getSimpleName());
|
||||
}
|
||||
|
||||
protected AbstractAuditLog(Settings settings, final ThreadPool threadPool, final IndexNameExpressionResolver resolver, final ClusterService clusterService) {
|
||||
super();
|
||||
this.threadPool = threadPool;
|
||||
|
||||
this.settings = settings;
|
||||
this.resolver = resolver;
|
||||
this.clusterService = clusterService;
|
||||
|
||||
this.opendistrosecurityIndex = settings.get(ConfigConstants.OPENDISTRO_SECURITY_CONFIG_INDEX_NAME, ConfigConstants.OPENDISTRO_SECURITY_DEFAULT_CONFIG_INDEX);
|
||||
|
||||
resolveBulkRequests = settings.getAsBoolean(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_RESOLVE_BULK_REQUESTS, false);
|
||||
|
||||
restAuditingEnabled = settings.getAsBoolean(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_ENABLE_REST, true);
|
||||
transportAuditingEnabled = settings.getAsBoolean(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_ENABLE_TRANSPORT, true);
|
||||
|
||||
disabledRestCategories = new ArrayList<>(settings.getAsList(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_CONFIG_DISABLED_REST_CATEGORIES, defaultDisabledCategories).stream()
|
||||
.map(c->c.toUpperCase()).collect(Collectors.toList()));
|
||||
|
||||
if(disabledRestCategories.size() == 1 && "NONE".equals(disabledRestCategories.get(0))) {
|
||||
disabledRestCategories.clear();
|
||||
}
|
||||
|
||||
if (disabledRestCategories.size() > 0) {
|
||||
log.info("Configured categories on rest layer to ignore: {}", disabledRestCategories);
|
||||
}
|
||||
|
||||
disabledTransportCategories = new ArrayList<>(settings.getAsList(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_CONFIG_DISABLED_TRANSPORT_CATEGORIES, defaultDisabledCategories).stream()
|
||||
.map(c->c.toUpperCase()).collect(Collectors.toList()));
|
||||
|
||||
if(disabledTransportCategories.size() == 1 && "NONE".equals(disabledTransportCategories.get(0))) {
|
||||
disabledTransportCategories.clear();
|
||||
}
|
||||
|
||||
if (disabledTransportCategories.size() > 0) {
|
||||
log.info("Configured categories on transport layer to ignore: {}", disabledTransportCategories);
|
||||
}
|
||||
|
||||
logRequestBody = settings.getAsBoolean(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_LOG_REQUEST_BODY, true);
|
||||
resolveIndices = settings.getAsBoolean(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_RESOLVE_INDICES, true);
|
||||
|
||||
ignoredAuditUsers = new ArrayList<>(settings.getAsList(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_IGNORE_USERS, defaultIgnoredUsers));
|
||||
|
||||
if(ignoredAuditUsers.size() == 1 && "NONE".equals(ignoredAuditUsers.get(0))) {
|
||||
ignoredAuditUsers.clear();
|
||||
}
|
||||
|
||||
if (ignoredAuditUsers.size() > 0) {
|
||||
log.info("Configured Users to ignore: {}", ignoredAuditUsers);
|
||||
}
|
||||
|
||||
ignoredComplianceUsersForRead = new ArrayList<>(settings.getAsList(ConfigConstants.OPENDISTRO_SECURITY_COMPLIANCE_HISTORY_READ_IGNORE_USERS, defaultIgnoredUsers));
|
||||
|
||||
if(ignoredComplianceUsersForRead.size() == 1 && "NONE".equals(ignoredComplianceUsersForRead.get(0))) {
|
||||
ignoredComplianceUsersForRead.clear();
|
||||
}
|
||||
|
||||
if (ignoredComplianceUsersForRead.size() > 0) {
|
||||
log.info("Configured Users to ignore for read compliance events: {}", ignoredComplianceUsersForRead);
|
||||
}
|
||||
|
||||
ignoredComplianceUsersForWrite = new ArrayList<>(settings.getAsList(ConfigConstants.OPENDISTRO_SECURITY_COMPLIANCE_HISTORY_WRITE_IGNORE_USERS, defaultIgnoredUsers));
|
||||
|
||||
if(ignoredComplianceUsersForWrite.size() == 1 && "NONE".equals(ignoredComplianceUsersForWrite.get(0))) {
|
||||
ignoredComplianceUsersForWrite.clear();
|
||||
}
|
||||
|
||||
if (ignoredComplianceUsersForWrite.size() > 0) {
|
||||
log.info("Configured Users to ignore for write compliance events: {}", ignoredComplianceUsersForWrite);
|
||||
}
|
||||
|
||||
|
||||
|
||||
ignoreAuditRequests = settings.getAsList(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_IGNORE_REQUESTS, Collections.emptyList());
|
||||
if (ignoreAuditRequests.size() > 0) {
|
||||
log.info("Configured Requests to ignore: {}", ignoreAuditRequests);
|
||||
}
|
||||
|
||||
// check if some categories are invalid
|
||||
for (String event : disabledRestCategories) {
|
||||
try {
|
||||
AuditMessage.Category.valueOf(event.toUpperCase());
|
||||
} catch(Exception iae) {
|
||||
log.error("Unkown category {}, please check opendistro_security.audit.config.disabled_categories settings", event);
|
||||
}
|
||||
}
|
||||
|
||||
// check if some categories are invalid
|
||||
for (String event : disabledTransportCategories) {
|
||||
try {
|
||||
AuditMessage.Category.valueOf(event.toUpperCase());
|
||||
} catch(Exception iae) {
|
||||
log.error("Unkown category {}, please check opendistro_security.audit.config.disabled_categories settings", event);
|
||||
}
|
||||
}
|
||||
|
||||
this.excludeSensitiveHeaders = settings.getAsBoolean(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXCLUDE_SENSITIVE_HEADERS, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void logFailedLogin(String effectiveUser, boolean securityadmin, String initiatingUser, TransportRequest request, Task task) {
|
||||
final String action = null;
|
||||
|
||||
if(!checkTransportFilter(Category.FAILED_LOGIN, action, effectiveUser, request)) {
|
||||
return;
|
||||
}
|
||||
|
||||
final TransportAddress remoteAddress = getRemoteAddress();
|
||||
final List<AuditMessage> msgs = RequestResolver.resolve(Category.FAILED_LOGIN, getOrigin(), action, null, effectiveUser, securityadmin, initiatingUser, remoteAddress, request, getThreadContextHeaders(), task, resolver, clusterService, settings, logRequestBody, resolveIndices, resolveBulkRequests, opendistrosecurityIndex, excludeSensitiveHeaders, null);
|
||||
|
||||
for(AuditMessage msg: msgs) {
|
||||
save(msg);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void logFailedLogin(String effectiveUser, boolean securityadmin, String initiatingUser, RestRequest request) {
|
||||
|
||||
if(!checkRestFilter(Category.FAILED_LOGIN, effectiveUser, request)) {
|
||||
return;
|
||||
}
|
||||
|
||||
AuditMessage msg = new AuditMessage(Category.FAILED_LOGIN, clusterService, getOrigin(), Origin.REST);
|
||||
TransportAddress remoteAddress = getRemoteAddress();
|
||||
msg.addRemoteAddress(remoteAddress);
|
||||
if(request != null && logRequestBody && request.hasContentOrSourceParam()) {
|
||||
msg.addTupleToRequestBody(request.contentOrSourceParam());
|
||||
}
|
||||
|
||||
if(request != null) {
|
||||
msg.addPath(request.path());
|
||||
msg.addRestHeaders(request.getHeaders(), excludeSensitiveHeaders);
|
||||
msg.addRestParams(request.params());
|
||||
}
|
||||
|
||||
msg.addInitiatingUser(initiatingUser);
|
||||
msg.addEffectiveUser(effectiveUser);
|
||||
msg.addIsAdminDn(securityadmin);
|
||||
|
||||
save(msg);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void logSucceededLogin(String effectiveUser, boolean securityadmin, String initiatingUser, TransportRequest request, String action, Task task) {
|
||||
|
||||
if(!checkTransportFilter(Category.AUTHENTICATED, action, effectiveUser, request)) {
|
||||
return;
|
||||
}
|
||||
|
||||
final TransportAddress remoteAddress = getRemoteAddress();
|
||||
final List<AuditMessage> msgs = RequestResolver.resolve(Category.AUTHENTICATED, getOrigin(), action, null, effectiveUser, securityadmin, initiatingUser,remoteAddress, request, getThreadContextHeaders(), task, resolver, clusterService, settings, logRequestBody, resolveIndices, resolveBulkRequests, opendistrosecurityIndex, excludeSensitiveHeaders, null);
|
||||
|
||||
for(AuditMessage msg: msgs) {
|
||||
save(msg);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void logSucceededLogin(String effectiveUser, boolean securityadmin, String initiatingUser, RestRequest request) {
|
||||
|
||||
if(!checkRestFilter(Category.AUTHENTICATED, effectiveUser, request)) {
|
||||
return;
|
||||
}
|
||||
|
||||
AuditMessage msg = new AuditMessage(Category.AUTHENTICATED, clusterService, getOrigin(), Origin.REST);
|
||||
TransportAddress remoteAddress = getRemoteAddress();
|
||||
msg.addRemoteAddress(remoteAddress);
|
||||
if(request != null && logRequestBody && request.hasContentOrSourceParam()) {
|
||||
msg.addTupleToRequestBody(request.contentOrSourceParam());
|
||||
}
|
||||
|
||||
if(request != null) {
|
||||
msg.addPath(request.path());
|
||||
msg.addRestHeaders(request.getHeaders(), excludeSensitiveHeaders);
|
||||
msg.addRestParams(request.params());
|
||||
}
|
||||
|
||||
msg.addInitiatingUser(initiatingUser);
|
||||
msg.addEffectiveUser(effectiveUser);
|
||||
msg.addIsAdminDn(securityadmin);
|
||||
save(msg);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void logMissingPrivileges(String privilege, String effectiveUser, RestRequest request) {
|
||||
if(!checkRestFilter(Category.MISSING_PRIVILEGES, effectiveUser, request)) {
|
||||
return;
|
||||
}
|
||||
|
||||
AuditMessage msg = new AuditMessage(Category.MISSING_PRIVILEGES, clusterService, getOrigin(), Origin.REST);
|
||||
TransportAddress remoteAddress = getRemoteAddress();
|
||||
msg.addRemoteAddress(remoteAddress);
|
||||
if(request != null && logRequestBody && request.hasContentOrSourceParam()) {
|
||||
msg.addTupleToRequestBody(request.contentOrSourceParam());
|
||||
}
|
||||
if(request != null) {
|
||||
msg.addPath(request.path());
|
||||
msg.addRestHeaders(request.getHeaders(), excludeSensitiveHeaders);
|
||||
msg.addRestParams(request.params());
|
||||
}
|
||||
|
||||
msg.addEffectiveUser(effectiveUser);
|
||||
save(msg);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void logMissingPrivileges(String privilege, TransportRequest request, Task task) {
|
||||
final String action = null;
|
||||
|
||||
if(!checkTransportFilter(Category.MISSING_PRIVILEGES, privilege, getUser(), request)) {
|
||||
return;
|
||||
}
|
||||
|
||||
final TransportAddress remoteAddress = getRemoteAddress();
|
||||
final List<AuditMessage> msgs = RequestResolver.resolve(Category.MISSING_PRIVILEGES, getOrigin(), action, privilege, getUser(), null, null, remoteAddress, request, getThreadContextHeaders(), task, resolver, clusterService, settings, logRequestBody, resolveIndices, resolveBulkRequests, opendistrosecurityIndex, excludeSensitiveHeaders, null);
|
||||
|
||||
for(AuditMessage msg: msgs) {
|
||||
save(msg);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void logGrantedPrivileges(String privilege, TransportRequest request, Task task) {
|
||||
final String action = null;
|
||||
|
||||
if(!checkTransportFilter(Category.GRANTED_PRIVILEGES, privilege, getUser(), request)) {
|
||||
return;
|
||||
}
|
||||
|
||||
final TransportAddress remoteAddress = getRemoteAddress();
|
||||
final List<AuditMessage> msgs = RequestResolver.resolve(Category.GRANTED_PRIVILEGES, getOrigin(), action, privilege, getUser(), null, null, remoteAddress, request, getThreadContextHeaders(), task, resolver, clusterService, settings, logRequestBody, resolveIndices, resolveBulkRequests, opendistrosecurityIndex, excludeSensitiveHeaders, null);
|
||||
|
||||
for(AuditMessage msg: msgs) {
|
||||
save(msg);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void logBadHeaders(TransportRequest request, String action, Task task) {
|
||||
|
||||
if(!checkTransportFilter(Category.BAD_HEADERS, action, getUser(), request)) {
|
||||
return;
|
||||
}
|
||||
|
||||
final TransportAddress remoteAddress = getRemoteAddress();
|
||||
final List<AuditMessage> msgs = RequestResolver.resolve(Category.BAD_HEADERS, getOrigin(), action, null, getUser(), null, null, remoteAddress, request, getThreadContextHeaders(), task, resolver, clusterService, settings, logRequestBody, resolveIndices, resolveBulkRequests, opendistrosecurityIndex, excludeSensitiveHeaders, null);
|
||||
|
||||
for(AuditMessage msg: msgs) {
|
||||
save(msg);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void logBadHeaders(RestRequest request) {
|
||||
|
||||
if(!checkRestFilter(Category.BAD_HEADERS, getUser(), request)) {
|
||||
return;
|
||||
}
|
||||
|
||||
AuditMessage msg = new AuditMessage(Category.BAD_HEADERS, clusterService, getOrigin(), Origin.REST);
|
||||
TransportAddress remoteAddress = getRemoteAddress();
|
||||
msg.addRemoteAddress(remoteAddress);
|
||||
if(request != null && logRequestBody && request.hasContentOrSourceParam()) {
|
||||
msg.addTupleToRequestBody(request.contentOrSourceParam());
|
||||
}
|
||||
if(request != null) {
|
||||
msg.addPath(request.path());
|
||||
msg.addRestHeaders(request.getHeaders(), excludeSensitiveHeaders);
|
||||
msg.addRestParams(request.params());
|
||||
}
|
||||
|
||||
msg.addEffectiveUser(getUser());
|
||||
|
||||
save(msg);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void logSecurityIndexAttempt(TransportRequest request, String action, Task task) {
|
||||
|
||||
if(!checkTransportFilter(Category.OPENDISTRO_SECURITY_INDEX_ATTEMPT, action, getUser(), request)) {
|
||||
return;
|
||||
}
|
||||
|
||||
final TransportAddress remoteAddress = getRemoteAddress();
|
||||
final List<AuditMessage> msgs = RequestResolver.resolve(Category.OPENDISTRO_SECURITY_INDEX_ATTEMPT, getOrigin(), action, null, getUser(), false, null, remoteAddress, request, getThreadContextHeaders(), task, resolver, clusterService, settings, logRequestBody, resolveIndices, resolveBulkRequests, opendistrosecurityIndex, excludeSensitiveHeaders, null);
|
||||
|
||||
for(AuditMessage msg: msgs) {
|
||||
save(msg);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void logSSLException(TransportRequest request, Throwable t, String action, Task task) {
|
||||
|
||||
if(!checkTransportFilter(Category.SSL_EXCEPTION, action, getUser(), request)) {
|
||||
return;
|
||||
}
|
||||
|
||||
final TransportAddress remoteAddress = getRemoteAddress();
|
||||
|
||||
final List<AuditMessage> msgs = RequestResolver.resolve(Category.SSL_EXCEPTION, Origin.TRANSPORT, action, null, getUser(), false, null, remoteAddress, request,
|
||||
getThreadContextHeaders(), task, resolver, clusterService, settings, logRequestBody, resolveIndices, resolveBulkRequests, opendistrosecurityIndex, excludeSensitiveHeaders, t);
|
||||
|
||||
for(AuditMessage msg: msgs) {
|
||||
save(msg);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void logSSLException(RestRequest request, Throwable t) {
|
||||
|
||||
if(!checkRestFilter(Category.SSL_EXCEPTION, getUser(), request)) {
|
||||
return;
|
||||
}
|
||||
|
||||
AuditMessage msg = new AuditMessage(Category.SSL_EXCEPTION, clusterService, Origin.REST, Origin.REST);
|
||||
|
||||
TransportAddress remoteAddress = getRemoteAddress();
|
||||
msg.addRemoteAddress(remoteAddress);
|
||||
if(request != null && logRequestBody && request.hasContentOrSourceParam()) {
|
||||
msg.addTupleToRequestBody(request.contentOrSourceParam());
|
||||
}
|
||||
|
||||
if(request != null) {
|
||||
msg.addPath(request.path());
|
||||
msg.addRestHeaders(request.getHeaders(), excludeSensitiveHeaders);
|
||||
msg.addRestParams(request.params());
|
||||
}
|
||||
msg.addException(t);
|
||||
msg.addEffectiveUser(getUser());
|
||||
save(msg);
|
||||
}
|
||||
|
||||
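// compliance read auditing: records which fields of a document were read, unless the read
// was triggered by a write request or filtered out by the compliance configuration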
@Override
|
||||
public void logDocumentRead(String index, String id, ShardId shardId, Map<String, String> fieldNameValues, ComplianceConfig complianceConfig) {
|
||||
|
||||
if(complianceConfig == null || !complianceConfig.readHistoryEnabledForIndex(index)) {
|
||||
return;
|
||||
}
|
||||
|
||||
final String initiatingRequestClass = threadPool.getThreadContext().getHeader(ConfigConstants.OPENDISTRO_SECURITY_INITIAL_ACTION_CLASS_HEADER);
|
||||
|
||||
if(initiatingRequestClass != null && writeClasses.contains(initiatingRequestClass)) {
|
||||
return;
|
||||
}
|
||||
|
||||
Category category = opendistrosecurityIndex.equals(index)?Category.COMPLIANCE_INTERNAL_CONFIG_READ:Category.COMPLIANCE_DOC_READ;
|
||||
|
||||
String effectiveUser = getUser();
|
||||
if(!checkComplianceFilter(category, effectiveUser, getOrigin())) {
|
||||
return;
|
||||
}
|
||||
|
||||
if(fieldNameValues != null && !fieldNameValues.isEmpty()) {
|
||||
AuditMessage msg = new AuditMessage(category, clusterService, getOrigin(), null);
|
||||
TransportAddress remoteAddress = getRemoteAddress();
|
||||
msg.addRemoteAddress(remoteAddress);
|
||||
msg.addEffectiveUser(effectiveUser);
|
||||
msg.addIndices(new String[]{index});
|
||||
msg.addResolvedIndices(new String[]{index});
|
||||
msg.addShardId(shardId);
|
||||
//msg.addIsAdminDn(securityadmin);
|
||||
msg.addId(id);
|
||||
|
||||
try {
|
||||
if(complianceConfig.logReadMetadataOnly()) {
|
||||
try {
|
||||
XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent);
|
||||
builder.startObject();
|
||||
builder.field("field_names", fieldNameValues.keySet());
|
||||
builder.endObject();
|
||||
builder.close();
|
||||
msg.addUnescapedJsonToRequestBody(Strings.toString(builder));
|
||||
} catch (IOException e) {
|
||||
log.error(e.toString(), e);
|
||||
}
|
||||
} else {
|
||||
if(opendistrosecurityIndex.equals(index) && !"tattr".equals(id)) {
|
||||
try {
|
||||
Map<String, String> map = fieldNameValues.entrySet().stream()
|
||||
.collect(Collectors.toMap(entry -> "id", entry -> new String(BaseEncoding.base64().decode(((Entry<String, String>) entry).getValue()), StandardCharsets.UTF_8)));
|
||||
msg.addMapToRequestBody(Utils.convertJsonToxToStructuredMap(map.get("id")));
|
||||
} catch (Exception e) {
|
||||
msg.addMapToRequestBody(new HashMap<String, Object>(fieldNameValues));
|
||||
}
|
||||
} else {
|
||||
msg.addMapToRequestBody(new HashMap<String, Object>(fieldNameValues));
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error("Unable to generate request body for {} and {}",msg.toPrettyString(),fieldNameValues, e);
|
||||
}
|
||||
|
||||
save(msg);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
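// compliance write auditing: records create/update operations and, when configured,
// a JSON diff between the previous and the new document source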
@Override
|
||||
public void logDocumentWritten(ShardId shardId, GetResult originalResult, Index currentIndex, IndexResult result, ComplianceConfig complianceConfig) {
|
||||
|
||||
if(complianceConfig == null || !complianceConfig.writeHistoryEnabledForIndex(shardId.getIndexName())) {
|
||||
return;
|
||||
}
|
||||
|
||||
Category category = opendistrosecurityIndex.equals(shardId.getIndexName())?Category.COMPLIANCE_INTERNAL_CONFIG_WRITE:Category.COMPLIANCE_DOC_WRITE;
|
||||
|
||||
String effectiveUser = getUser();
|
||||
|
||||
if(!checkComplianceFilter(category, effectiveUser, getOrigin())) {
|
||||
return;
|
||||
}
|
||||
|
||||
AuditMessage msg = new AuditMessage(category, clusterService, getOrigin(), null);
|
||||
TransportAddress remoteAddress = getRemoteAddress();
|
||||
msg.addRemoteAddress(remoteAddress);
|
||||
msg.addEffectiveUser(effectiveUser);
|
||||
msg.addIndices(new String[]{shardId.getIndexName()});
|
||||
msg.addResolvedIndices(new String[]{shardId.getIndexName()});
|
||||
msg.addId(currentIndex.id());
|
||||
msg.addType(currentIndex.type());
|
||||
msg.addShardId(shardId);
|
||||
msg.addComplianceDocVersion(result.getVersion());
|
||||
msg.addComplianceOperation(result.isCreated()?Operation.CREATE:Operation.UPDATE);
|
||||
|
||||
if(complianceConfig.logDiffsForWrite() && originalResult != null && originalResult.isExists() && originalResult.internalSourceRef() != null) {
|
||||
try {
|
||||
String originalSource = null;
|
||||
String currentSource = null;
|
||||
if (opendistrosecurityIndex.equals(shardId.getIndexName())) {
|
||||
try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, OpenDistroSecurityDeprecationHandler.INSTANCE, originalResult.internalSourceRef(), XContentType.JSON)) {
|
||||
Object base64 = parser.map().values().iterator().next();
|
||||
if(base64 instanceof String) {
|
||||
originalSource = (new String(BaseEncoding.base64().decode((String) base64)));
|
||||
} else {
|
||||
originalSource = XContentHelper.convertToJson(originalResult.internalSourceRef(), false, XContentType.JSON);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error(e.toString(), e);
|
||||
}
|
||||
|
||||
try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, OpenDistroSecurityDeprecationHandler.INSTANCE, currentIndex.source(), XContentType.JSON)) {
|
||||
Object base64 = parser.map().values().iterator().next();
|
||||
if(base64 instanceof String) {
|
||||
currentSource = (new String(BaseEncoding.base64().decode((String) base64)));
|
||||
} else {
|
||||
currentSource = XContentHelper.convertToJson(currentIndex.source(), false, XContentType.JSON);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error(e.toString(), e);
|
||||
}
|
||||
} else {
|
||||
originalSource = XContentHelper.convertToJson(originalResult.internalSourceRef(), false, XContentType.JSON);
|
||||
currentSource = XContentHelper.convertToJson(currentIndex.source(), false, XContentType.JSON);
|
||||
}
|
||||
final JsonNode diffnode = JsonDiff.asJson(DefaultObjectMapper.objectMapper.readTree(originalSource), DefaultObjectMapper.objectMapper.readTree(currentSource));
|
||||
msg.addComplianceWriteDiffSource(diffnode.size() == 0?"":diffnode.toString());
|
||||
} catch (Exception e) {
|
||||
log.error("Unable to generate diff for {}",msg.toPrettyString(),e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (!complianceConfig.logWriteMetadataOnly()){
|
||||
if(opendistrosecurityIndex.equals(shardId.getIndexName())) {
|
||||
//current source, normally not null or empty
|
||||
try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, OpenDistroSecurityDeprecationHandler.INSTANCE, currentIndex.source(), XContentType.JSON)) {
|
||||
Object base64 = parser.map().values().iterator().next();
|
||||
if(base64 instanceof String) {
|
||||
msg.addUnescapedJsonToRequestBody(new String(BaseEncoding.base64().decode((String) base64)));
|
||||
} else {
|
||||
msg.addTupleToRequestBody(new Tuple<XContentType, BytesReference>(XContentType.JSON, currentIndex.source()));
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error(e.toString(), e);
|
||||
}
|
||||
|
||||
//if we want to have msg.ComplianceWritePreviousSource we need to do the same as above
|
||||
|
||||
} else {
|
||||
|
||||
//previous source, can be null if document is a new one
|
||||
//msg.ComplianceWritePreviousSource(new Tuple<XContentType, BytesReference>(XContentType.JSON, originalResult.internalSourceRef()));
|
||||
|
||||
//current source, normally not null or empty
|
||||
msg.addTupleToRequestBody(new Tuple<XContentType, BytesReference>(XContentType.JSON, currentIndex.source()));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
save(msg);
|
||||
}
|
||||
|
||||
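// compliance write auditing for document deletions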
@Override
|
||||
public void logDocumentDeleted(ShardId shardId, Delete delete, DeleteResult result) {
|
||||
|
||||
String effectiveUser = getUser();
|
||||
|
||||
if(!checkComplianceFilter(Category.COMPLIANCE_DOC_WRITE, effectiveUser, getOrigin())) {
|
||||
return;
|
||||
}
|
||||
|
||||
AuditMessage msg = new AuditMessage(Category.COMPLIANCE_DOC_WRITE, clusterService, getOrigin(), null);
|
||||
TransportAddress remoteAddress = getRemoteAddress();
|
||||
msg.addRemoteAddress(remoteAddress);
|
||||
msg.addEffectiveUser(effectiveUser);
|
||||
msg.addIndices(new String[]{shardId.getIndexName()});
|
||||
msg.addResolvedIndices(new String[]{shardId.getIndexName()});
|
||||
msg.addId(delete.id());
|
||||
msg.addType(delete.type());
|
||||
msg.addShardId(shardId);
|
||||
msg.addComplianceDocVersion(result.getVersion());
|
||||
msg.addComplianceOperation(Operation.DELETE);
|
||||
save(msg);
|
||||
}
|
||||
|
||||
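// logs a snapshot of the external configuration: elasticsearch.yml settings, environment
// variables, system properties and checksums of security-related files referenced in the settings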
@Override
|
||||
public void logExternalConfig(Settings settings, Environment environment) {
|
||||
|
||||
if(!checkComplianceFilter(Category.COMPLIANCE_EXTERNAL_CONFIG, null, getOrigin())) {
|
||||
return;
|
||||
}
|
||||
|
||||
final Map<String, Object> configAsMap = Utils.convertJsonToxToStructuredMap(settings);
|
||||
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
final Map<String, String> envAsMap = AccessController.doPrivileged(new PrivilegedAction<Map<String, String>>() {
|
||||
@Override
|
||||
public Map<String, String> run() {
|
||||
return System.getenv();
|
||||
}
|
||||
});
|
||||
|
||||
final Map propsAsMap = AccessController.doPrivileged(new PrivilegedAction<Map>() {
|
||||
@Override
|
||||
public Map run() {
|
||||
return System.getProperties();
|
||||
}
|
||||
});
|
||||
|
||||
final String sha256 = DigestUtils.sha256Hex(configAsMap.toString()+envAsMap.toString()+propsAsMap.toString());
|
||||
AuditMessage msg = new AuditMessage(Category.COMPLIANCE_EXTERNAL_CONFIG, clusterService, null, null);
|
||||
|
||||
try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) {
|
||||
builder.startObject();
|
||||
builder.startObject("external_configuration");
|
||||
builder.field("elasticsearch_yml", configAsMap);
|
||||
builder.field("os_environment", envAsMap);
|
||||
builder.field("java_properties", propsAsMap);
|
||||
builder.field("sha256_checksum", sha256);
|
||||
builder.endObject();
|
||||
builder.endObject();
|
||||
builder.close();
|
||||
msg.addUnescapedJsonToRequestBody(Strings.toString(builder));
|
||||
} catch (Exception e) {
|
||||
log.error("Unable to build message",e);
|
||||
}
|
||||
|
||||
Map<String, Path> paths = new HashMap<String, Path>();
|
||||
for(String key: settings.keySet()) {
|
||||
if(key.startsWith("opendistro_security") &&
|
||||
(key.contains("filepath") || key.contains("file_path"))) {
|
||||
String value = settings.get(key);
|
||||
if(value != null && !value.isEmpty()) {
|
||||
Path path = value.startsWith("/")?Paths.get(value):environment.configFile().resolve(value);
|
||||
paths.put(key, path);
|
||||
}
|
||||
}
|
||||
}
|
||||
msg.addFileInfos(paths);
|
||||
|
||||
|
||||
save(msg);
|
||||
}
|
||||
|
||||
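// request origin (REST, TRANSPORT or LOCAL) from the thread context, falling back to the origin header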
private Origin getOrigin() {
|
||||
String origin = (String) threadPool.getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_ORIGIN);
|
||||
|
||||
if(origin == null && threadPool.getThreadContext().getHeader(ConfigConstants.OPENDISTRO_SECURITY_ORIGIN_HEADER) != null) {
|
||||
origin = threadPool.getThreadContext().getHeader(ConfigConstants.OPENDISTRO_SECURITY_ORIGIN_HEADER);
|
||||
}
|
||||
|
||||
return origin == null?null:Origin.valueOf(origin);
|
||||
}
|
||||
|
||||
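// remote address from the thread context, falling back to the Base64-serialized header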
private TransportAddress getRemoteAddress() {
|
||||
TransportAddress address = threadPool.getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_REMOTE_ADDRESS);
|
||||
if(address == null && threadPool.getThreadContext().getHeader(ConfigConstants.OPENDISTRO_SECURITY_REMOTE_ADDRESS_HEADER) != null) {
|
||||
address = new TransportAddress((InetSocketAddress) Base64Helper.deserializeObject(threadPool.getThreadContext().getHeader(ConfigConstants.OPENDISTRO_SECURITY_REMOTE_ADDRESS_HEADER)));
|
||||
}
|
||||
return address;
|
||||
}
|
||||
|
||||
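// effective user name from the thread context, falling back to the Base64-serialized header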
private String getUser() {
|
||||
User user = threadPool.getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_USER);
|
||||
if(user == null && threadPool.getThreadContext().getHeader(ConfigConstants.OPENDISTRO_SECURITY_USER_HEADER) != null) {
|
||||
user = (User) Base64Helper.deserializeObject(threadPool.getThreadContext().getHeader(ConfigConstants.OPENDISTRO_SECURITY_USER_HEADER));
|
||||
}
|
||||
return user==null?null:user.getName();
|
||||
}
|
||||
|
||||
private Map<String, String> getThreadContextHeaders() {
|
||||
return threadPool.getThreadContext().getHeaders();
|
||||
}
|
||||
|
||||
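// decides whether a transport-layer event of the given category should be logged, honouring the
// enabled flag, internal/monitor actions, ignored users and requests and the disabled transport categories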
private boolean checkTransportFilter(final Category category, final String action, final String effectiveUser, TransportRequest request) {
|
||||
|
||||
if(log.isTraceEnabled()) {
|
||||
log.trace("Check category:{}, action:{}, effectiveUser:{}, request:{}", category, action, effectiveUser, request==null?null:request.getClass().getSimpleName());
|
||||
}
|
||||
|
||||
|
||||
if(!transportAuditingEnabled) {
|
||||
//ignore for certain categories
|
||||
if(category != Category.FAILED_LOGIN
|
||||
&& category != Category.MISSING_PRIVILEGES
|
||||
&& category != Category.OPENDISTRO_SECURITY_INDEX_ATTEMPT) {
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
//skip internals
|
||||
if(action != null
|
||||
&&
|
||||
( action.startsWith("internal:")
|
||||
|| action.startsWith("cluster:monitor")
|
||||
|| action.startsWith("indices:monitor")
|
||||
)
|
||||
) {
|
||||
|
||||
|
||||
//if(log.isTraceEnabled()) {
|
||||
// log.trace("Skipped audit log message due to category ({}) or action ({}) does not match", category, action);
|
||||
//}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
if (ignoredAuditUsers.size() > 0 && WildcardMatcher.matchAny(ignoredAuditUsers, effectiveUser)) {
|
||||
|
||||
if(log.isTraceEnabled()) {
|
||||
log.trace("Skipped audit log message because of user {} is ignored", effectiveUser);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
if (request != null && ignoreAuditRequests.size() > 0
|
||||
&& (WildcardMatcher.matchAny(ignoreAuditRequests, action) || WildcardMatcher.matchAny(ignoreAuditRequests, request.getClass().getSimpleName()))) {
|
||||
|
||||
if(log.isTraceEnabled()) {
|
||||
log.trace("Skipped audit log message because request {} is ignored", action+"#"+request.getClass().getSimpleName());
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!disabledTransportCategories.contains(category.toString())) {
|
||||
return true;
|
||||
} else {
|
||||
if(log.isTraceEnabled()) {
|
||||
log.trace("Skipped audit log message because category {} not enabled", category);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
//skip cluster:monitor, index:monitor, internal:*
|
||||
//check transport audit enabled
|
||||
//check category enabled
|
||||
//check action
|
||||
//check ignoreAuditUsers
|
||||
|
||||
}
|
||||
|
||||
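// decides whether a compliance event should be logged for the given user and origin,
// honouring the per-user ignore lists for read and write events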
private boolean checkComplianceFilter(final Category category, final String effectiveUser, Origin origin) {
|
||||
if(log.isTraceEnabled()) {
|
||||
log.trace("Check for COMPLIANCE category:{}, effectiveUser:{}, origin: {}", category, effectiveUser, origin);
|
||||
}
|
||||
|
||||
if(origin == Origin.LOCAL && effectiveUser == null && category != Category.COMPLIANCE_EXTERNAL_CONFIG) {
|
||||
if(log.isTraceEnabled()) {
|
||||
log.trace("Skipped compliance log message because of null user and local origin");
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
if(category == Category.COMPLIANCE_DOC_READ || category == Category.COMPLIANCE_INTERNAL_CONFIG_READ) {
|
||||
if (ignoredComplianceUsersForRead.size() > 0 && effectiveUser != null
|
||||
&& WildcardMatcher.matchAny(ignoredComplianceUsersForRead, effectiveUser)) {
|
||||
|
||||
if(log.isTraceEnabled()) {
|
||||
log.trace("Skipped compliance log message because of user {} is ignored", effectiveUser);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if(category == Category.COMPLIANCE_DOC_WRITE || category == Category.COMPLIANCE_INTERNAL_CONFIG_WRITE) {
|
||||
if (ignoredComplianceUsersForWrite.size() > 0 && effectiveUser != null
|
||||
&& WildcardMatcher.matchAny(ignoredComplianceUsersForWrite, effectiveUser)) {
|
||||
|
||||
if(log.isTraceEnabled()) {
|
||||
log.trace("Skipped compliance log message because of user {} is ignored", effectiveUser);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
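// decides whether a REST-layer event of the given category should be logged, honouring the
// enabled flag, ignored users and request paths and the disabled REST categories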
private boolean checkRestFilter(final Category category, final String effectiveUser, RestRequest request) {
|
||||
if(log.isTraceEnabled()) {
|
||||
log.trace("Check for REST category:{}, effectiveUser:{}, request:{}", category, effectiveUser, request==null?null:request.path());
|
||||
}
|
||||
|
||||
if(!restAuditingEnabled) {
|
||||
//ignore for certain categories
|
||||
if(category != Category.FAILED_LOGIN
|
||||
&& category != Category.MISSING_PRIVILEGES
|
||||
&& category != Category.OPENDISTRO_SECURITY_INDEX_ATTEMPT) {
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if (ignoredAuditUsers.size() > 0 && WildcardMatcher.matchAny(ignoredAuditUsers, effectiveUser)) {
|
||||
|
||||
if(log.isTraceEnabled()) {
|
||||
log.trace("Skipped audit log message because of user {} is ignored", effectiveUser);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
if (request != null && ignoreAuditRequests.size() > 0
|
||||
&& (WildcardMatcher.matchAny(ignoreAuditRequests, request.path()))) {
|
||||
|
||||
if(log.isTraceEnabled()) {
|
||||
log.trace("Skipped audit log message because request {} is ignored", request.path());
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!disabledRestCategories.contains(category.toString())) {
|
||||
return true;
|
||||
} else {
|
||||
if(log.isTraceEnabled()) {
|
||||
log.trace("Skipped audit log message because category {} not enabled", category);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
//check rest audit enabled
|
||||
//check category enabled
|
||||
//check action
|
||||
//check ignoreAuditUsers
|
||||
}
|
||||
|
||||
|
||||
protected abstract void save(final AuditMessage msg);
|
||||
}
|
@ -0,0 +1,94 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.auditlog.impl;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedAction;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.routing.AuditMessageRouter;
|
||||
import com.amazon.opendistroforelasticsearch.security.compliance.ComplianceConfig;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
|
||||
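// default AuditLog implementation: delegates persistence to an AuditMessageRouter and
// registers a JVM shutdown hook that closes the router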
public final class AuditLogImpl extends AbstractAuditLog {
|
||||
|
||||
private final AuditMessageRouter messageRouter;
|
||||
private final boolean enabled;
|
||||
|
||||
public AuditLogImpl(final Settings settings, final Path configPath, Client clientProvider, ThreadPool threadPool,
|
||||
final IndexNameExpressionResolver resolver, final ClusterService clusterService) {
|
||||
super(settings, threadPool, resolver, clusterService);
|
||||
|
||||
this.messageRouter = new AuditMessageRouter(settings, clientProvider, threadPool, configPath);
|
||||
this.enabled = messageRouter.isEnabled();
|
||||
|
||||
log.info("Message routing enabled: {}", this.enabled);
|
||||
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
log.debug("Security Manager present");
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
|
||||
|
||||
AccessController.doPrivileged(new PrivilegedAction<Object>() {
|
||||
@Override
|
||||
public Object run() {
|
||||
Runtime.getRuntime().addShutdownHook(new Thread() {
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
close();
|
||||
} catch (IOException e) {
|
||||
log.warn("Exception while shutting down message router", e);
|
||||
}
|
||||
}
|
||||
});
|
||||
log.debug("Shutdown Hook registered");
|
||||
return null;
|
||||
}
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setComplianceConfig(ComplianceConfig complianceConfig) {
|
||||
messageRouter.setComplianceConfig(complianceConfig);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
messageRouter.close();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void save(final AuditMessage msg) {
|
||||
if (enabled) {
|
||||
messageRouter.route(msg);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,446 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.auditlog.impl;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.LinkOption;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.attribute.FileTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.codec.digest.DigestUtils;
|
||||
import org.apache.http.client.utils.URIBuilder;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.transport.TransportAddress;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.joda.time.format.DateTimeFormat;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog.Operation;
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog.Origin;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.support.Utils;
|
||||
|
||||
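// a single audit event; the constants below are the field names used when the event
// is serialized to JSON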
public final class AuditMessage {
|
||||
|
||||
//clustername and cluster uuid
|
||||
private static final String AUTHORIZATION_HEADER = "Authorization";
|
||||
public static final String FORMAT_VERSION = "audit_format_version";
|
||||
public static final String CATEGORY = "audit_category";
|
||||
public static final String REQUEST_EFFECTIVE_USER = "audit_request_effective_user";
|
||||
public static final String REQUEST_INITIATING_USER = "audit_request_initiating_user";
|
||||
public static final String UTC_TIMESTAMP = "@timestamp";
|
||||
|
||||
@Deprecated
|
||||
private static final String UTC_TIMESTAMP_DEPRECATED = "audit_utc_timestamp";
|
||||
|
||||
public static final String CLUSTER_NAME = "audit_cluster_name";
|
||||
public static final String NODE_ID = "audit_node_id";
|
||||
public static final String NODE_HOST_ADDRESS = "audit_node_host_address";
|
||||
public static final String NODE_HOST_NAME = "audit_node_host_name";
|
||||
public static final String NODE_NAME = "audit_node_name";
|
||||
|
||||
public static final String ORIGIN = "audit_request_origin";
|
||||
public static final String REMOTE_ADDRESS = "audit_request_remote_address";
|
||||
|
||||
public static final String REST_REQUEST_PATH = "audit_rest_request_path";
|
||||
//public static final String REST_REQUEST_BODY = "audit_rest_request_body";
|
||||
public static final String REST_REQUEST_PARAMS = "audit_rest_request_params";
|
||||
public static final String REST_REQUEST_HEADERS = "audit_rest_request_headers";
|
||||
|
||||
public static final String TRANSPORT_REQUEST_TYPE = "audit_transport_request_type";
|
||||
public static final String TRANSPORT_ACTION = "audit_transport_action";
|
||||
public static final String TRANSPORT_REQUEST_HEADERS = "audit_transport_headers";
|
||||
|
||||
public static final String ID = "audit_trace_doc_id";
|
||||
public static final String TYPES = "audit_trace_doc_types";
|
||||
//public static final String SOURCE = "audit_trace_doc_source";
|
||||
public static final String INDICES = "audit_trace_indices";
|
||||
public static final String SHARD_ID = "audit_trace_shard_id";
|
||||
public static final String RESOLVED_INDICES = "audit_trace_resolved_indices";
|
||||
|
||||
public static final String EXCEPTION = "audit_request_exception_stacktrace";
|
||||
public static final String IS_ADMIN_DN = "audit_request_effective_user_is_admin";
|
||||
public static final String PRIVILEGE = "audit_request_privilege";
|
||||
|
||||
public static final String TASK_ID = "audit_trace_task_id";
|
||||
public static final String TASK_PARENT_ID = "audit_trace_task_parent_id";
|
||||
|
||||
public static final String REQUEST_BODY = "audit_request_body";
|
||||
public static final String COMPLIANCE_DIFF_IS_NOOP = "audit_compliance_diff_is_noop";
|
||||
public static final String COMPLIANCE_DIFF_CONTENT = "audit_compliance_diff_content";
|
||||
public static final String COMPLIANCE_FILE_INFOS = "audit_compliance_file_infos";
|
||||
|
||||
//public static final String COMPLIANCE_DIFF_STORED_IS_NOOP = "audit_compliance_diff_stored_is_noop";
|
||||
//public static final String COMPLIANCE_STORED_FIELDS_CONTENT = "audit_compliance_stored_fields_content";
|
||||
|
||||
public static final String REQUEST_LAYER = "audit_request_layer";
|
||||
|
||||
public static final String COMPLIANCE_OPERATION = "audit_compliance_operation";
|
||||
public static final String COMPLIANCE_DOC_VERSION = "audit_compliance_doc_version";
|
||||
|
||||
private static final DateTimeFormatter DEFAULT_FORMAT = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZZ");
|
||||
private final Map<String, Object> auditInfo = new HashMap<String, Object>(50);
|
||||
private final Category msgCategory;
|
||||
|
||||
public AuditMessage(final Category msgCategory, final ClusterService clusterService, final Origin origin, final Origin layer) {
|
||||
this.msgCategory = Objects.requireNonNull(msgCategory);
|
||||
final String currentTime = currentTime();
|
||||
auditInfo.put(FORMAT_VERSION, 3);
|
||||
auditInfo.put(CATEGORY, Objects.requireNonNull(msgCategory));
|
||||
auditInfo.put(UTC_TIMESTAMP, currentTime);
|
||||
auditInfo.put(UTC_TIMESTAMP_DEPRECATED, currentTime);
|
||||
auditInfo.put(NODE_HOST_ADDRESS, Objects.requireNonNull(clusterService).localNode().getHostAddress());
|
||||
auditInfo.put(NODE_ID, Objects.requireNonNull(clusterService).localNode().getId());
|
||||
auditInfo.put(NODE_HOST_NAME, Objects.requireNonNull(clusterService).localNode().getHostName());
|
||||
auditInfo.put(NODE_NAME, Objects.requireNonNull(clusterService).localNode().getName());
|
||||
auditInfo.put(CLUSTER_NAME, Objects.requireNonNull(clusterService).getClusterName().value());
|
||||
|
||||
if(origin != null) {
|
||||
auditInfo.put(ORIGIN, origin);
|
||||
}
|
||||
|
||||
if(layer != null) {
|
||||
auditInfo.put(REQUEST_LAYER, layer);
|
||||
}
|
||||
}
|
||||
|
||||
public void addRemoteAddress(TransportAddress remoteAddress) {
|
||||
if (remoteAddress != null && remoteAddress.getAddress() != null) {
|
||||
auditInfo.put(REMOTE_ADDRESS, remoteAddress.getAddress());
|
||||
}
|
||||
}
|
||||
|
||||
public void addIsAdminDn(boolean isAdminDn) {
|
||||
auditInfo.put(IS_ADMIN_DN, isAdminDn);
|
||||
}
|
||||
|
||||
public void addException(Throwable t) {
|
||||
if (t != null) {
|
||||
auditInfo.put(EXCEPTION, ExceptionsHelper.stackTrace(t));
|
||||
}
|
||||
}
|
||||
|
||||
public void addPrivilege(String priv) {
|
||||
if (priv != null) {
|
||||
auditInfo.put(PRIVILEGE, priv);
|
||||
}
|
||||
}
|
||||
|
||||
public void addInitiatingUser(String user) {
|
||||
if (user != null) {
|
||||
auditInfo.put(REQUEST_INITIATING_USER, user);
|
||||
}
|
||||
}
|
||||
|
||||
public void addEffectiveUser(String user) {
|
||||
if (user != null) {
|
||||
auditInfo.put(REQUEST_EFFECTIVE_USER, user);
|
||||
}
|
||||
}
|
||||
|
||||
public void addPath(String path) {
|
||||
if (path != null) {
|
||||
auditInfo.put(REST_REQUEST_PATH, path);
|
||||
}
|
||||
}
|
||||
|
||||
public void addComplianceWriteDiffSource(String diff) {
|
||||
if (diff != null && !diff.isEmpty()) {
|
||||
auditInfo.put(COMPLIANCE_DIFF_CONTENT, diff);
|
||||
auditInfo.put(COMPLIANCE_DIFF_IS_NOOP, false);
|
||||
} else if (diff != null && diff.isEmpty()) {
|
||||
auditInfo.put(COMPLIANCE_DIFF_IS_NOOP, true);
|
||||
}
|
||||
}
|
||||
|
||||
// public void addComplianceWriteStoredFields0(String diff) {
|
||||
// if (diff != null && !diff.isEmpty()) {
|
||||
// auditInfo.put(COMPLIANCE_STORED_FIELDS_CONTENT, diff);
|
||||
// //auditInfo.put(COMPLIANCE_DIFF_STORED_IS_NOOP, false);
|
||||
// }
|
||||
// }
|
||||
|
||||
public void addTupleToRequestBody(Tuple<XContentType, BytesReference> xContentTuple) {
|
||||
if (xContentTuple != null) {
|
||||
try {
|
||||
auditInfo.put(REQUEST_BODY, XContentHelper.convertToJson(xContentTuple.v2(), false, xContentTuple.v1()));
|
||||
} catch (Exception e) {
|
||||
auditInfo.put(REQUEST_BODY, "ERROR: Unable to convert to json because of "+e.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void addMapToRequestBody(Map<String, Object> map) {
|
||||
if(map != null) {
|
||||
auditInfo.put(REQUEST_BODY, Utils.convertStructuredMapToJson(map));
|
||||
}
|
||||
}
|
||||
|
||||
public void addUnescapedJsonToRequestBody(String source) {
|
||||
if (source != null) {
|
||||
auditInfo.put(REQUEST_BODY, source);
|
||||
}
|
||||
}
|
||||
|
||||
public void addRequestType(String requestType) {
|
||||
if (requestType != null) {
|
||||
auditInfo.put(TRANSPORT_REQUEST_TYPE, requestType);
|
||||
}
|
||||
}
|
||||
|
||||
public void addAction(String action) {
|
||||
if (action != null) {
|
||||
auditInfo.put(TRANSPORT_ACTION, action);
|
||||
}
|
||||
}
|
||||
|
||||
public void addId(String id) {
|
||||
if (id != null) {
|
||||
auditInfo.put(ID, id);
|
||||
}
|
||||
}
|
||||
|
||||
public void addTypes(String[] types) {
|
||||
if (types != null && types.length > 0) {
|
||||
auditInfo.put(TYPES, types);
|
||||
}
|
||||
}
|
||||
|
||||
public void addType(String type) {
|
||||
if (type != null) {
|
||||
auditInfo.put(TYPES, new String[] { type });
|
||||
}
|
||||
}
|
||||
|
||||
public void addFileInfos(Map<String, Path> paths) {
|
||||
if (paths != null && !paths.isEmpty()) {
|
||||
List<Object> infos = new ArrayList<>();
|
||||
for(Entry<String, Path> path: paths.entrySet()) {
|
||||
|
||||
try {
|
||||
if(Files.isReadable(path.getValue())) {
|
||||
final String checksum = DigestUtils.sha256Hex(Files.readAllBytes(path.getValue()));
|
||||
FileTime lm = Files.getLastModifiedTime(path.getValue(), LinkOption.NOFOLLOW_LINKS);
|
||||
Map<String, Object> innerInfos = new HashMap<>();
|
||||
innerInfos.put("sha256", chcksm);
|
||||
innerInfos.put("last_modified", formatTime(lm.toMillis()));
|
||||
innerInfos.put("key", path.getKey());
|
||||
innerInfos.put("path", path.getValue().toAbsolutePath().toString());
|
||||
infos.add(innerInfos);
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
//ignore non readable files
|
||||
}
|
||||
}
|
||||
auditInfo.put(COMPLIANCE_FILE_INFOS, infos);
|
||||
}
|
||||
}
|
||||
|
||||
/*public void addSource(Map<String, String> source) {
|
||||
if (source != null && !source.isEmpty()) {
|
||||
auditInfo.put(REQUEST_BODY, source);
|
||||
}
|
||||
}*/
|
||||
|
||||
public void addIndices(String[] indices) {
|
||||
if (indices != null && indices.length > 0) {
|
||||
auditInfo.put(INDICES, indices);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public void addResolvedIndices(String[] resolvedIndices) {
|
||||
if (resolvedIndices != null && resolvedIndices.length > 0) {
|
||||
auditInfo.put(RESOLVED_INDICES, resolvedIndices);
|
||||
}
|
||||
}
|
||||
|
||||
public void addTaskId(long id) {
|
||||
auditInfo.put(TASK_ID, auditInfo.get(NODE_ID)+":"+id);
|
||||
}
|
||||
|
||||
public void addShardId(ShardId id) {
|
||||
if(id != null) {
|
||||
auditInfo.put(SHARD_ID, id.getId());
|
||||
}
|
||||
}
|
||||
|
||||
public void addTaskParentId(String id) {
|
||||
if(id != null) {
|
||||
auditInfo.put(TASK_PARENT_ID, id);
|
||||
}
|
||||
}
|
||||
|
||||
public void addRestParams(Map<String,String> params) {
|
||||
if(params != null && !params.isEmpty()) {
|
||||
auditInfo.put(REST_REQUEST_PARAMS, new HashMap<>(params));
|
||||
}
|
||||
}
|
||||
|
||||
public void addRestHeaders(Map<String,List<String>> headers, boolean excludeSensitiveHeaders) {
|
||||
if(headers != null && !headers.isEmpty()) {
|
||||
if(excludeSensitiveHeaders) {
|
||||
final Map<String, List<String>> headersClone = new HashMap<String, List<String>>(headers)
|
||||
.entrySet().stream()
|
||||
.filter(map -> !map.getKey().equalsIgnoreCase(AUTHORIZATION_HEADER))
|
||||
.collect(Collectors.toMap(p -> p.getKey(), p -> p.getValue()));
|
||||
auditInfo.put(REST_REQUEST_HEADERS, headersClone);
|
||||
} else {
|
||||
auditInfo.put(REST_REQUEST_HEADERS, new HashMap<String, List<String>>(headers));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void addTransportHeaders(Map<String,String> headers, boolean excludeSensitiveHeaders) {
|
||||
if(headers != null && !headers.isEmpty()) {
|
||||
if(excludeSensitiveHeaders) {
|
||||
final Map<String,String> headersClone = new HashMap<String,String>(headers)
|
||||
.entrySet().stream()
|
||||
.filter(map -> !map.getKey().equalsIgnoreCase(AUTHORIZATION_HEADER))
|
||||
.collect(Collectors.toMap(p -> p.getKey(), p -> p.getValue()));
|
||||
auditInfo.put(TRANSPORT_REQUEST_HEADERS, headersClone);
|
||||
} else {
|
||||
auditInfo.put(TRANSPORT_REQUEST_HEADERS, new HashMap<String,String>(headers));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void addComplianceOperation(Operation op) {
|
||||
if(op != null) {
|
||||
auditInfo.put(COMPLIANCE_OPERATION, op);
|
||||
}
|
||||
}
|
||||
|
||||
public void addComplianceDocVersion(long version) {
|
||||
auditInfo.put(COMPLIANCE_DOC_VERSION, version);
|
||||
}
|
||||
|
||||
public Map<String, Object> getAsMap() {
|
||||
return new HashMap<>(this.auditInfo);
|
||||
}
|
||||
|
||||
public String getInitiatingUser() {
|
||||
return (String) this.auditInfo.get(REQUEST_INITIATING_USER);
|
||||
}
|
||||
|
||||
public String getEffectiveUser() {
|
||||
return (String) this.auditInfo.get(REQUEST_EFFECTIVE_USER);
|
||||
}
|
||||
|
||||
public String getRequestType() {
|
||||
return (String) this.auditInfo.get(TRANSPORT_REQUEST_TYPE);
|
||||
}
|
||||
|
||||
public Category getCategory() {
|
||||
return msgCategory;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
try {
|
||||
return Strings.toString(JsonXContent.contentBuilder().map(getAsMap()));
|
||||
} catch (final IOException e) {
|
||||
throw ExceptionsHelper.convertToElastic(e);
|
||||
}
|
||||
}
|
||||
|
||||
public String toPrettyString() {
|
||||
try {
|
||||
return Strings.toString(JsonXContent.contentBuilder().prettyPrint().map(getAsMap()));
|
||||
} catch (final IOException e) {
|
||||
throw ExceptionsHelper.convertToElastic(e);
|
||||
}
|
||||
}
|
||||
|
||||
public String toText() {
|
||||
StringBuilder builder = new StringBuilder();
|
||||
for (Entry<String, Object> entry : getAsMap().entrySet()) {
|
||||
addIfNonEmpty(builder, entry.getKey(), stringOrNull(entry.getValue()));
|
||||
}
|
||||
return builder.toString();
|
||||
}
|
||||
|
||||
public final String toJson() {
|
||||
return this.toString();
|
||||
}
|
||||
|
||||
public String toUrlParameters() {
|
||||
URIBuilder builder = new URIBuilder();
|
||||
for (Entry<String, Object> entry : getAsMap().entrySet()) {
|
||||
builder.addParameter(entry.getKey(), stringOrNull(entry.getValue()));
|
||||
}
|
||||
return builder.toString();
|
||||
}
|
||||
|
||||
protected static void addIfNonEmpty(StringBuilder builder, String key, String value) {
|
||||
if (!Strings.isEmpty(value)) {
|
||||
if (builder.length() > 0) {
|
||||
builder.append("\n");
|
||||
}
|
||||
builder.append(key).append(": ").append(value);
|
||||
}
|
||||
}
|
||||
|
||||
private String currentTime() {
|
||||
DateTime dt = new DateTime(DateTimeZone.UTC);
|
||||
return DEFAULT_FORMAT.print(dt);
|
||||
}
|
||||
|
||||
private String formatTime(long epoch) {
|
||||
DateTime dt = new DateTime(epoch, DateTimeZone.UTC);
|
||||
return DEFAULT_FORMAT.print(dt);
|
||||
}
|
||||
|
||||
protected String stringOrNull(Object object) {
|
||||
if(object == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return String.valueOf(object);
|
||||
}
|
||||
|
||||
public static enum Category {
|
||||
BAD_HEADERS,
|
||||
FAILED_LOGIN,
|
||||
MISSING_PRIVILEGES,
|
||||
GRANTED_PRIVILEGES,
|
||||
OPENDISTRO_SECURITY_INDEX_ATTEMPT,
|
||||
SSL_EXCEPTION,
|
||||
AUTHENTICATED,
|
||||
COMPLIANCE_DOC_READ,
|
||||
COMPLIANCE_DOC_WRITE,
|
||||
COMPLIANCE_EXTERNAL_CONFIG,
|
||||
COMPLIANCE_INTERNAL_CONFIG_READ,
|
||||
COMPLIANCE_INTERNAL_CONFIG_WRITE;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,423 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.auditlog.impl;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.action.DocWriteRequest;
|
||||
import org.elasticsearch.action.IndicesRequest;
|
||||
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
|
||||
import org.elasticsearch.action.bulk.BulkItemRequest;
|
||||
import org.elasticsearch.action.bulk.BulkShardRequest;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.get.GetRequest;
|
||||
import org.elasticsearch.action.get.MultiGetRequest;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.support.IndicesOptions;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.transport.TransportAddress;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.reindex.DeleteByQueryRequest;
|
||||
import org.elasticsearch.index.reindex.ReindexRequest;
|
||||
import org.elasticsearch.index.reindex.UpdateByQueryRequest;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.transport.TransportRequest;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog.Origin;
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.impl.AuditMessage.Category;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.support.Utils;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.WildcardMatcher;
|
||||
|
||||
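// turns a transport request into one or more AuditMessages, optionally expanding
// bulk shard requests into one message per item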
public final class RequestResolver {
|
||||
|
||||
private static final Logger log = LogManager.getLogger(RequestResolver.class);
|
||||
|
||||
public static List<AuditMessage> resolve(
|
||||
final Category category,
|
||||
final Origin origin,
|
||||
final String action,
|
||||
final String privilege,
|
||||
final String effectiveUser,
|
||||
final Boolean securityadmin,
|
||||
final String initiatingUser,
|
||||
final TransportAddress remoteAddress,
|
||||
final TransportRequest request,
|
||||
final Map<String, String> headers,
|
||||
final Task task,
|
||||
final IndexNameExpressionResolver resolver,
|
||||
final ClusterService cs,
|
||||
final Settings settings,
|
||||
final boolean logRequestBody,
|
||||
final boolean resolveIndices,
|
||||
final boolean resolveBulk,
|
||||
final String opendistrosecurityIndex,
|
||||
final boolean excludeSensitiveHeaders,
|
||||
final Throwable exception) {
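// a BulkShardRequest is expanded below into one audit message per contained item,
// so every single document operation is logged individually; all other requests
// produce exactly one audit message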
|
||||
|
||||
if(resolveBulk && request instanceof BulkShardRequest) {
|
||||
final BulkItemRequest[] innerRequests = ((BulkShardRequest) request).items();
|
||||
final List<AuditMessage> messages = new ArrayList<AuditMessage>(innerRequests.length);
|
||||
|
||||
for(BulkItemRequest ar: innerRequests) {
|
||||
final DocWriteRequest<?> innerRequest = ar.request();
|
||||
final AuditMessage msg = resolveInner(
|
||||
category,
|
||||
effectiveUser,
|
||||
securityadmin,
|
||||
initiatingUser,
|
||||
remoteAddress,
|
||||
action,
|
||||
privilege,
|
||||
origin,
|
||||
innerRequest,
|
||||
headers,
|
||||
task,
|
||||
resolver,
|
||||
cs,
|
||||
settings,
|
||||
logRequestBody,
|
||||
resolveIndices,
|
||||
opendistrosecurityIndex,
|
||||
excludeSensitiveHeaders,
|
||||
exception);
|
||||
msg.addShardId(((BulkShardRequest) request).shardId());
|
||||
|
||||
messages.add(msg);
|
||||
}
|
||||
|
||||
return messages;
|
||||
}
|
||||
|
||||
if(request instanceof BulkShardRequest) {
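// bulk shard requests that were not expanded above are only audited for
// FAILED_LOGIN, MISSING_PRIVILEGES and OPENDISTRO_SECURITY_INDEX_ATTEMPT;
// all other categories are skipped for these requests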
|
||||
|
||||
if(category != Category.FAILED_LOGIN
|
||||
&& category != Category.MISSING_PRIVILEGES
|
||||
&& category != Category.OPENDISTRO_SECURITY_INDEX_ATTEMPT) {
|
||||
|
||||
return Collections.emptyList();
|
||||
}
|
||||
}
|
||||
|
||||
return Collections.singletonList(resolveInner(
|
||||
category,
|
||||
effectiveUser,
|
||||
securityadmin,
|
||||
initiatingUser,
|
||||
remoteAddress,
|
||||
action,
|
||||
privilege,
|
||||
origin,
|
||||
request,
|
||||
headers,
|
||||
task,
|
||||
resolver,
|
||||
cs,
|
||||
settings,
|
||||
logRequestBody,
|
||||
resolveIndices,
|
||||
opendistrosecurityIndex,
|
||||
excludeSensitiveHeaders,
|
||||
exception));
|
||||
}
|
||||
|
||||
|
||||
private static AuditMessage resolveInner(final Category category,
|
||||
final String effectiveUser,
|
||||
final Boolean securityadmin,
|
||||
final String initiatingUser,
|
||||
final TransportAddress remoteAddress,
|
||||
final String action,
|
||||
final String priv,
|
||||
final Origin origin,
|
||||
final Object request,
|
||||
final Map<String, String> headers,
|
||||
final Task task,
|
||||
final IndexNameExpressionResolver resolver,
|
||||
final ClusterService cs,
|
||||
final Settings settings,
|
||||
final boolean logRequestBody,
|
||||
final boolean resolveIndices,
|
||||
final String opendistrosecurityIndex,
|
||||
final boolean excludeSensitiveHeaders,
|
||||
final Throwable exception) {
|
||||
|
||||
final AuditMessage msg = new AuditMessage(category, cs, origin, Origin.TRANSPORT);
|
||||
msg.addInitiatingUser(initiatingUser);
|
||||
msg.addEffectiveUser(effectiveUser);
|
||||
msg.addRemoteAddress(remoteAddress);
|
||||
msg.addAction(action);
|
||||
|
||||
if(request != null) {
|
||||
msg.addRequestType(request.getClass().getSimpleName());
|
||||
}
|
||||
|
||||
if(securityadmin != null) {
|
||||
msg.addIsAdminDn(securityadmin);
|
||||
}
|
||||
|
||||
msg.addException(exception);
|
||||
msg.addPrivilege(priv);
|
||||
msg.addTransportHeaders(headers, excludeSensitiveHeaders);
|
||||
|
||||
if(task != null) {
|
||||
msg.addTaskId(task.getId());
|
||||
if(task.getParentTaskId() != null && task.getParentTaskId().isSet()) {
|
||||
msg.addTaskParentId(task.getParentTaskId().toString());
|
||||
}
|
||||
}
|
||||
|
||||
//attempt to resolve indices/types/id/source
|
||||
if (request instanceof MultiGetRequest.Item) {
|
||||
final MultiGetRequest.Item item = (MultiGetRequest.Item) request;
|
||||
final String[] indices = arrayOrEmpty(item.indices());
|
||||
final String type = item.type();
|
||||
final String id = item.id();
|
||||
msg.addType(type);
|
||||
msg.addId(id);
|
||||
addIndicesSourceSafe(msg, indices, resolver, cs, null, null, settings, resolveIndices, logRequestBody, false, opendistrosecurityIndex);
|
||||
} else if (request instanceof CreateIndexRequest) {
|
||||
final CreateIndexRequest cir = (CreateIndexRequest) request;
|
||||
final String[] indices = arrayOrEmpty(cir.indices());
|
||||
addIndicesSourceSafe(msg, indices, resolver, cs, null, null, settings, resolveIndices, logRequestBody, false, opendistrosecurityIndex);
|
||||
} else if (request instanceof DeleteIndexRequest) {
|
||||
final DeleteIndexRequest dir = (DeleteIndexRequest) request;
|
||||
final String[] indices = arrayOrEmpty(dir.indices());
|
||||
//dir: log all ids when writing
|
||||
addIndicesSourceSafe(msg, indices, resolver, cs, null, null, settings, resolveIndices, logRequestBody, false, opendistrosecurityIndex);
|
||||
} else if (request instanceof IndexRequest) {
|
||||
final IndexRequest ir = (IndexRequest) request;
|
||||
final String[] indices = arrayOrEmpty(ir.indices());
|
||||
final String type = ir.type();
|
||||
final String id = ir.id();
|
||||
msg.addShardId(ir.shardId());
|
||||
msg.addType(type);
|
||||
msg.addId(id);
|
||||
addIndicesSourceSafe(msg, indices, resolver, cs, ir.getContentType(), ir.source(), settings, resolveIndices, logRequestBody, true, opendistrosecurityIndex);
|
||||
} else if (request instanceof DeleteRequest) {
|
||||
final DeleteRequest dr = (DeleteRequest) request;
|
||||
final String[] indices = arrayOrEmpty(dr.indices());
|
||||
final String type = dr.type();
|
||||
final String id = dr.id();
|
||||
msg.addShardId(dr.shardId());
|
||||
msg.addType(type);
|
||||
msg.addId(id);
|
||||
addIndicesSourceSafe(msg, indices, resolver, cs, null, null, settings, resolveIndices, logRequestBody, false, opendistrosecurityIndex);
|
||||
} else if (request instanceof UpdateRequest) {
|
||||
final UpdateRequest ur = (UpdateRequest) request;
|
||||
final String[] indices = arrayOrEmpty(ur.indices());
|
||||
final String type = ur.type();
|
||||
final String id = ur.id();
|
||||
msg.addType(type);
|
||||
msg.addId(id);
|
||||
addIndicesSourceSafe(msg, indices, resolver, cs, null, null, settings, resolveIndices, logRequestBody, false, opendistrosecurityIndex);
|
||||
if(logRequestBody) {
|
||||
|
||||
if (ur.doc() != null) {
|
||||
msg.addTupleToRequestBody(convertSource(ur.doc().getContentType(), ur.doc().source()));
|
||||
}
|
||||
|
||||
if (ur.script() != null) {
|
||||
msg.addMapToRequestBody(Utils.convertJsonToxToStructuredMap(ur.script()));
|
||||
}
|
||||
}
|
||||
} else if (request instanceof GetRequest) {
|
||||
final GetRequest gr = (GetRequest) request;
|
||||
final String[] indices = arrayOrEmpty(gr.indices());
|
||||
final String type = gr.type();
|
||||
final String id = gr.id();
|
||||
msg.addType(type);
|
||||
msg.addId(id);
|
||||
addIndicesSourceSafe(msg, indices, resolver, cs, null, null, settings, resolveIndices, logRequestBody, false, opendistrosecurityIndex);
|
||||
} else if (request instanceof SearchRequest) {
|
||||
final SearchRequest sr = (SearchRequest) request;
|
||||
final String[] indices = arrayOrEmpty(sr.indices());
|
||||
final String[] types = arrayOrEmpty(sr.types());
|
||||
|
||||
msg.addTypes(types);
|
||||
Map<String, Object> sourceAsMap = sr.source() == null? null:Utils.convertJsonToxToStructuredMap(sr.source());
|
||||
addIndicesSourceSafe(msg, indices, resolver, cs, XContentType.JSON, sourceAsMap, settings, resolveIndices, logRequestBody, false, opendistrosecurityIndex);
|
||||
} else if (request instanceof ClusterUpdateSettingsRequest) {
|
||||
if(logRequestBody) {
|
||||
final ClusterUpdateSettingsRequest cusr = (ClusterUpdateSettingsRequest) request;
|
||||
final Settings persistentSettings = cusr.persistentSettings();
|
||||
final Settings transientSettings = cusr.transientSettings();
|
||||
|
||||
XContentBuilder builder = null;
|
||||
try {
|
||||
|
||||
builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
if(persistentSettings != null) {
|
||||
builder.field("persistent_settings", Utils.convertJsonToxToStructuredMap(persistentSettings));
|
||||
}
|
||||
if(transientSettings != null) {
|
||||
builder.field("transient_settings", Utils.convertJsonToxToStructuredMap(persistentSettings));
|
||||
}
|
||||
builder.endObject();
|
||||
msg.addUnescapedJsonToRequestBody(builder == null?null:Strings.toString(builder));
|
||||
} catch (IOException e) {
|
||||
log.error(e);
|
||||
} finally {
|
||||
if(builder != null) {
|
||||
builder.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
} else if (request instanceof ReindexRequest) {
|
||||
final IndexRequest ir = ((ReindexRequest) request).getDestination();
|
||||
final String[] indices = arrayOrEmpty(ir.indices());
|
||||
final String type = ir.type();
|
||||
final String id = ir.id();
|
||||
msg.addShardId(ir.shardId());
|
||||
msg.addType(type);
|
||||
msg.addId(id);
|
||||
addIndicesSourceSafe(msg, indices, resolver, cs, ir.getContentType(), ir.source(), settings, resolveIndices, logRequestBody, true, opendistrosecurityIndex);
|
||||
} else if (request instanceof DeleteByQueryRequest) {
|
||||
final DeleteByQueryRequest ir = (DeleteByQueryRequest) request;
|
||||
final String[] indices = arrayOrEmpty(ir.indices());
|
||||
addIndicesSourceSafe(msg, indices, resolver, cs, null, null, settings, resolveIndices, logRequestBody, false, opendistrosecurityIndex);
|
||||
} else if (request instanceof UpdateByQueryRequest) {
|
||||
final UpdateByQueryRequest ir = (UpdateByQueryRequest) request;
|
||||
final String[] indices = arrayOrEmpty(ir.indices());
|
||||
addIndicesSourceSafe(msg, indices, resolver, cs, null, null, settings, resolveIndices, logRequestBody, false, opendistrosecurityIndex);
|
||||
} else if (request instanceof PutMappingRequest) {
|
||||
final PutMappingRequest pr = (PutMappingRequest) request;
|
||||
final Index ci = pr.getConcreteIndex();
|
||||
msg.addType(pr.type());
|
||||
String[] indices = new String[0];
|
||||
|
||||
if(ci != null) {
|
||||
indices = new String[]{ci.getName()};
|
||||
}
|
||||
msg.addIndices(indices);
|
||||
|
||||
if(logRequestBody) {
|
||||
msg.addUnescapedJsonToRequestBody(pr.source());
|
||||
}
|
||||
|
||||
if(resolveIndices) {
|
||||
msg.addResolvedIndices(indices);
|
||||
}
|
||||
} else if (request instanceof IndicesRequest) { //less specific
|
||||
final IndicesRequest ir = (IndicesRequest) request;
|
||||
final String[] indices = arrayOrEmpty(ir.indices());
|
||||
addIndicesSourceSafe(msg, indices, resolver, cs, null, null, settings, resolveIndices, logRequestBody, false, opendistrosecurityIndex);
|
||||
}
|
||||
|
||||
return msg;
|
||||
}
|
||||
|
||||
private static void addIndicesSourceSafe(final AuditMessage msg,
|
||||
final String[] indices,
|
||||
final IndexNameExpressionResolver resolver,
|
||||
final ClusterService cs,
|
||||
final XContentType xContentType,
|
||||
final Object source,
|
||||
final Settings settings,
|
||||
boolean resolveIndices,
|
||||
final boolean addSource,
|
||||
final boolean sourceIsSensitive,
|
||||
final String opendistrosecurityIndex) {
|
||||
|
||||
if(addSource) {
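// when the request body is to be logged, always resolve indices so that the
// sensitive-source check against the security index below can work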
|
||||
resolveIndices = true;
|
||||
}
|
||||
|
||||
final String[] _indices = indices == null?new String[0]:indices;
|
||||
msg.addIndices(_indices);
|
||||
|
||||
final Set<String> allIndices;
|
||||
|
||||
if(resolveIndices) {
|
||||
final String[] resolvedIndices = (resolver==null)?new String[0]:resolver.concreteIndexNames(cs.state(), IndicesOptions.lenientExpandOpen(), indices);
|
||||
msg.addResolvedIndices(resolvedIndices);
|
||||
allIndices = new HashSet<String>(resolvedIndices.length+_indices.length);
|
||||
allIndices.addAll(Arrays.asList(_indices));
|
||||
allIndices.addAll(Arrays.asList(resolvedIndices));
|
||||
if(allIndices.contains("_all")) {
|
||||
allIndices.add("*");
|
||||
}
|
||||
} else {
|
||||
allIndices = new HashSet<String>(_indices.length);
|
||||
allIndices.addAll(Arrays.asList(_indices));
|
||||
if(allIndices.contains("_all")) {
|
||||
allIndices.add("*");
|
||||
}
|
||||
}
|
||||
|
||||
if(addSource) {
|
||||
if(sourceIsSensitive && source != null) {
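// never log the request body of operations that (potentially) target the security index itself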
|
||||
if(!WildcardMatcher.matchAny(allIndices.toArray(new String[0]), opendistrosecurityIndex)) {
|
||||
if(source instanceof BytesReference) {
|
||||
msg.addTupleToRequestBody(convertSource(xContentType, (BytesReference) source));
|
||||
} else {
|
||||
msg.addMapToRequestBody((Map) source);
|
||||
}
|
||||
}
|
||||
} else if(source != null) {
|
||||
if(source instanceof BytesReference) {
|
||||
msg.addTupleToRequestBody(convertSource(xContentType, (BytesReference) source));
|
||||
} else {
|
||||
msg.addMapToRequestBody((Map) source);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static Tuple<XContentType, BytesReference> convertSource(XContentType type, BytesReference bytes) {
|
||||
if(type == null) {
|
||||
type = XContentType.JSON;
|
||||
}
|
||||
|
||||
return new Tuple<XContentType, BytesReference>(type, bytes);
|
||||
}
|
||||
|
||||
private static String[] arrayOrEmpty(String[] array) {
|
||||
if(array == null) {
|
||||
return new String[0];
|
||||
}
|
||||
|
||||
if(array.length == 1 && array[0] == null) {
|
||||
return new String[0];
|
||||
}
|
||||
|
||||
return array;
|
||||
}
|
||||
}
|
@ -0,0 +1,103 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.auditlog.routing;
|
||||
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.LinkedBlockingQueue;
|
||||
import java.util.concurrent.ThreadPoolExecutor;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.impl.AuditMessage;
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.sink.AuditLogSink;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
|
||||
public class AsyncStoragePool {
|
||||
|
||||
protected final Logger log = LogManager.getLogger(this.getClass());
|
||||
|
||||
private static final int DEFAULT_THREAD_POOL_SIZE = 10;
|
||||
private static final int DEFAULT_THREAD_POOL_MAX_QUEUE_LEN = 100 * 1000;
|
||||
|
||||
// package private for unit tests
|
||||
final ExecutorService pool;
|
||||
|
||||
int threadPoolSize;
|
||||
int threadPoolMaxQueueLen;
|
||||
|
||||
public AsyncStoragePool(final Settings settings) {
|
||||
this.threadPoolSize = settings.getAsInt(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_THREADPOOL_SIZE, DEFAULT_THREAD_POOL_SIZE).intValue();
|
||||
this.threadPoolMaxQueueLen = settings.getAsInt(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_THREADPOOL_MAX_QUEUE_LEN, DEFAULT_THREAD_POOL_MAX_QUEUE_LEN).intValue();
|
||||
|
||||
if (threadPoolSize <= 0) {
|
||||
threadPoolSize = DEFAULT_THREAD_POOL_SIZE;
|
||||
}
|
||||
|
||||
if (threadPoolMaxQueueLen <= 0) {
|
||||
threadPoolMaxQueueLen = DEFAULT_THREAD_POOL_MAX_QUEUE_LEN;
|
||||
}
|
||||
|
||||
this.pool = createExecutor(threadPoolSize, threadPoolMaxQueueLen);
|
||||
}
|
||||
|
||||
public void submit(AuditMessage message, AuditLogSink sink) {
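// hand the message to the thread pool for asynchronous storage; if submission fails
// (e.g. the pool's queue is full) the message is handed to the sink's fallback sink, if any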
|
||||
try {
|
||||
pool.submit(() -> {
|
||||
sink.store(message);
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("stored on delegate {} asynchronously", sink.getClass().getSimpleName());
|
||||
}
|
||||
});
|
||||
} catch (Exception ex) {
|
||||
log.error("Could not submit audit message {} to thread pool for delegate '{}' due to '{}'", message, sink.getClass().getSimpleName(), ex.getMessage());
|
||||
if (sink.getFallbackSink() != null) {
|
||||
sink.getFallbackSink().store(message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private ThreadPoolExecutor createExecutor(final int threadPoolSize, final int maxQueueLen) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Create new executor with threadPoolSize: {} and maxQueueLen: {}", threadPoolSize, maxQueueLen);
|
||||
}
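// fixed-size pool with a bounded work queue; once the queue is full submit() throws a
// RejectedExecutionException, which is handled by the fallback logic in submit(...) above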
|
||||
return new ThreadPoolExecutor(threadPoolSize, threadPoolSize, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>(maxQueueLen));
|
||||
}
|
||||
|
||||
public void close() {
|
||||
|
||||
if (pool != null) {
|
||||
pool.shutdown(); // Disable new tasks from being submitted
|
||||
|
||||
try {
|
||||
// Wait a while for existing tasks to terminate
|
||||
if (!pool.awaitTermination(60, TimeUnit.SECONDS)) {
|
||||
pool.shutdownNow(); // Cancel currently executing tasks
|
||||
// Wait a while for tasks to respond to being cancelled
|
||||
if (!pool.awaitTermination(60, TimeUnit.SECONDS))
|
||||
log.error("Pool did not terminate");
|
||||
}
|
||||
} catch (InterruptedException ie) {
|
||||
// (Re-)Cancel if current thread also interrupted
|
||||
pool.shutdownNow();
|
||||
// Preserve interrupt status
|
||||
Thread.currentThread().interrupt();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,184 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.auditlog.routing;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.util.Collections;
|
||||
import java.util.EnumMap;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.impl.AuditMessage;
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.impl.AuditMessage.Category;
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.sink.AuditLogSink;
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.sink.SinkProvider;
|
||||
import com.amazon.opendistroforelasticsearch.security.compliance.ComplianceConfig;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.support.Utils;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
|
||||
public class AuditMessageRouter {
|
||||
|
||||
protected final Logger log = LogManager.getLogger(this.getClass());
|
||||
final AuditLogSink defaultSink;
|
||||
final Map<Category, List<AuditLogSink>> categorySinks = new EnumMap<>(Category.class);
|
||||
final SinkProvider sinkProvider;
|
||||
final AsyncStoragePool storagePool;
|
||||
final boolean enabled;
|
||||
boolean hasMultipleEndpoints;
|
||||
private ComplianceConfig complianceConfig;
|
||||
|
||||
public AuditMessageRouter(final Settings settings, final Client clientProvider, ThreadPool threadPool, final Path configPath) {
|
||||
this.sinkProvider = new SinkProvider(settings, clientProvider, threadPool, configPath);
|
||||
this.storagePool = new AsyncStoragePool(settings);
|
||||
|
||||
// get the default sink
|
||||
this.defaultSink = sinkProvider.getDefaultSink();
|
||||
if (defaultSink == null) {
|
||||
log.warn("No default storage available, audit log may not work properly. Please check configuration.");
|
||||
enabled = false;
|
||||
} else {
|
||||
// create sinks for all categories. Only do that if we have any extended setting, otherwise there is just the default category
|
||||
setupRoutes(settings);
|
||||
enabled = true;
|
||||
}
|
||||
}
|
||||
|
||||
public void setComplianceConfig(ComplianceConfig complianceConfig) {
|
||||
this.complianceConfig = complianceConfig;
|
||||
}
|
||||
|
||||
public boolean isEnabled() {
|
||||
return this.enabled;
|
||||
}
|
||||
|
||||
public final void route(final AuditMessage msg) {
|
||||
if (!enabled) {
|
||||
// should not happen since we check in AuditLogImpl, so this is just a safeguard
|
||||
log.error("#route(AuditMessage) called but message router is disabled");
|
||||
return;
|
||||
}
|
||||
// if we do not run the compliance features or no extended configuration is present, only log to default.
|
||||
if (!hasMultipleEndpoints || complianceConfig == null || !complianceConfig.isEnabled()) {
|
||||
store(defaultSink, msg);
|
||||
} else {
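// setupRoutes adds the default sink for every category without an explicit endpoint,
// so this list is never null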
|
||||
for (AuditLogSink sink : categorySinks.get(msg.getCategory())) {
|
||||
store(sink, msg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public final void close() {
|
||||
// shutdown storage pool
|
||||
storagePool.close();
|
||||
// close default
|
||||
sinkProvider.close();
|
||||
}
|
||||
|
||||
protected final void close(List<AuditLogSink> sinks) {
|
||||
for (AuditLogSink sink : sinks) {
|
||||
try {
|
||||
log.info("Closing {}", sink.getClass().getSimpleName());
|
||||
sink.close();
|
||||
} catch (Exception ex) {
|
||||
log.info("Could not close delegate '{}' due to '{}'", sink.getClass().getSimpleName(), ex.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private final void setupRoutes(Settings settings) {
|
||||
Map<String, Object> routesConfiguration = Utils.convertJsonToxToStructuredMap(settings.getAsSettings(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_CONFIG_ROUTES));
|
||||
if (!routesConfiguration.isEmpty()) {
|
||||
hasMultipleEndpoints = true;
|
||||
// first set up all configured routes. We do it this way so category names are case insensitive
|
||||
// and we can warn if a non-existing category has been detected.
|
||||
for (Entry<String, Object> routesEntry : routesConfiguration.entrySet()) {
|
||||
log.trace("Setting up routes for endpoint {}, configuraton is {}", routesEntry.getKey(), routesEntry.getValue());
|
||||
String categoryName = routesEntry.getKey();
|
||||
try {
|
||||
Category category = Category.valueOf(categoryName.toUpperCase());
|
||||
// warn for duplicate definitions
|
||||
if (categorySinks.get(category) != null) {
|
||||
log.warn("Duplicate routing configuration detected for category {}, skipping.", category);
|
||||
continue;
|
||||
}
|
||||
List<AuditLogSink> sinksForCategory = createSinksForCategory(category, settings.getAsSettings(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_CONFIG_ROUTES + "." + categoryName));
|
||||
if (!sinksForCategory.isEmpty()) {
|
||||
categorySinks.put(category, sinksForCategory);
|
||||
if(log.isDebugEnabled()) {
|
||||
log.debug("Created {} endpoints for category {}", sinksForCategory.size(), category);
|
||||
}
|
||||
} else {
|
||||
log.debug("No valid endpoints found for category {} adding only default.", category );
|
||||
|
||||
}
|
||||
} catch (Exception e ) {
|
||||
log.error("Invalid category '{}' found in routing configuration. Must be one of: {}", categoryName, Category.values());
|
||||
}
|
||||
}
|
||||
// for all non-configured categories we automatically set up the default endpoint
|
||||
for(Category category : Category.values()) {
|
||||
if (!categorySinks.containsKey(category)) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("No endpoint configured for category {}, adding default endpoint", category);
|
||||
}
|
||||
categorySinks.put(category, Collections.singletonList(defaultSink));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private final List<AuditLogSink> createSinksForCategory(Category category, Settings configuration) {
|
||||
List<AuditLogSink> sinksForCategory = new LinkedList<>();
|
||||
List<String> sinks = configuration.getAsList("endpoints");
|
||||
if (sinks == null || sinks.isEmpty()) {
|
||||
log.error("No endpoints configured for category {}", category);
|
||||
return sinksForCategory;
|
||||
}
|
||||
for (String sinkName : sinks) {
|
||||
AuditLogSink sink = sinkProvider.getSink(sinkName);
|
||||
if (sink != null && !sinksForCategory.contains(sink)) {
|
||||
sinksForCategory.add(sink);
|
||||
} else {
|
||||
log.error("Configured endpoint '{}' not available", sinkName);
|
||||
}
|
||||
}
|
||||
return sinksForCategory;
|
||||
}
|
||||
|
||||
private final void store(AuditLogSink sink, AuditMessage msg) {
|
||||
if (sink.isHandlingBackpressure()) {
|
||||
sink.store(msg);
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("stored on sink {} synchronously", sink.getClass().getSimpleName());
|
||||
}
|
||||
} else {
|
||||
storagePool.submit(msg, sink);
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("will store on sink {} asynchronously", sink.getClass().getSimpleName());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,143 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.auditlog.sink;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeZone;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.impl.AuditMessage;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
import com.google.common.util.concurrent.Uninterruptibles;
|
||||
|
||||
public abstract class AuditLogSink {
|
||||
|
||||
protected final Logger log = LogManager.getLogger(this.getClass());
|
||||
protected final Settings settings;
|
||||
protected final String settingsPrefix;
|
||||
private final String name;
|
||||
protected final AuditLogSink fallbackSink;
|
||||
private final int retryCount;
|
||||
private final long delayMs;
|
||||
|
||||
protected AuditLogSink(String name, Settings settings, String settingsPrefix, AuditLogSink fallbackSink) {
|
||||
this.name = name.toLowerCase();
|
||||
this.settings = Objects.requireNonNull(settings);
|
||||
this.settingsPrefix = settingsPrefix;
|
||||
this.fallbackSink = fallbackSink;
|
||||
|
||||
retryCount = settings.getAsInt(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_RETRY_COUNT, 0);
|
||||
delayMs = settings.getAsLong(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_RETRY_DELAY_MS, 1000L);
|
||||
}
|
||||
|
||||
public boolean isHandlingBackpressure() {
|
||||
return false;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public AuditLogSink getFallbackSink() {
|
||||
return fallbackSink;
|
||||
}
|
||||
|
||||
public final void store(AuditMessage msg) {
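// try this sink (with retries), then the fallback sink (with retries);
// as a last resort print the message to stderr so it is never silently lost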
|
||||
if (!doStoreWithRetry(msg) && !fallbackSink.doStoreWithRetry(msg)) {
|
||||
System.err.println(msg.toPrettyString());
|
||||
}
|
||||
}
|
||||
|
||||
private boolean doStoreWithRetry(AuditMessage msg) {
|
||||
//retryCount of 0 means no retry (which is: try exactly once) - delayMs is ignored
|
||||
//retryCount of 1 means: try and if this fails wait delayMs and try once again
|
||||
|
||||
if(doStore(msg)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
for(int i=0; i<retryCount; i++) {
|
||||
if(log.isDebugEnabled()) {
|
||||
log.debug("Retry attempt {}/{} for {} ({})", i+1, retryCount, this.getName(), this.getClass());
|
||||
}
|
||||
Uninterruptibles.sleepUninterruptibly(delayMs, TimeUnit.MILLISECONDS);
|
||||
if(doStore(msg)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
protected abstract boolean doStore(AuditMessage msg);
|
||||
|
||||
public void close() throws IOException {
|
||||
// to be implemented by subclasses
|
||||
}
|
||||
|
||||
protected String getExpandedIndexName(DateTimeFormatter indexPattern, String index) {
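// expands the configured Joda-time index pattern (e.g. 'security-auditlog-'YYYY.MM.dd)
// for the current UTC date; without a pattern the index name is returned unchanged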
|
||||
if(indexPattern == null) {
|
||||
return index;
|
||||
}
|
||||
return indexPattern.print(DateTime.now(DateTimeZone.UTC));
|
||||
}
|
||||
|
||||
protected Settings getSinkSettings(String prefix) {
|
||||
return settings.getAsSettings(prefix);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return ("AudtLogSink: Name: " + name+", type: " + this.getClass().getSimpleName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + ((name == null) ? 0 : name.hashCode());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj)
|
||||
return true;
|
||||
if (obj == null)
|
||||
return false;
|
||||
if (getClass() != obj.getClass())
|
||||
return false;
|
||||
AuditLogSink other = (AuditLogSink) obj;
|
||||
if (name == null) {
|
||||
if (other.name != null)
|
||||
return false;
|
||||
} else if (!name.equals(other.name))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
}
|
@ -0,0 +1,39 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.auditlog.sink;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.impl.AuditMessage;
|
||||
|
||||
public final class DebugSink extends AuditLogSink {
|
||||
|
||||
public DebugSink(String name, Settings settings, AuditLogSink fallbackSink) {
|
||||
super(name, settings, null, fallbackSink);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isHandlingBackpressure() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean doStore(final AuditMessage msg) {
|
||||
System.out.println("AUDIT_LOG: " + msg.toPrettyString());
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,186 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.auditlog.sink;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.security.KeyStore;
|
||||
import java.security.PrivateKey;
|
||||
import java.security.cert.X509Certificate;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.joda.time.format.DateTimeFormat;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.impl.AuditMessage;
|
||||
import com.amazon.opendistroforelasticsearch.security.httpclient.HttpClient;
|
||||
import com.amazon.opendistroforelasticsearch.security.httpclient.HttpClient.HttpClientBuilder;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.util.SSLConfigConstants;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.PemKeyReader;
|
||||
|
||||
public final class ExternalESSink extends AuditLogSink {
|
||||
|
||||
private static final List<String> DEFAULT_TLS_PROTOCOLS = Arrays.asList(new String[] { "TLSv1.2", "TLSv1.1"});
|
||||
// config in elasticsearch.yml
|
||||
private final String index;
|
||||
private final String type;
|
||||
private final HttpClient client;
|
||||
private List<String> servers;
|
||||
private DateTimeFormatter indexPattern;
|
||||
|
||||
static final String PKCS12 = "PKCS12";
|
||||
|
||||
public ExternalESSink(final String name, final Settings settings, final String settingPrefix, final Path configPath, AuditLogSink fallbackSink) throws Exception {
|
||||
|
||||
super(name, settings, settingPrefix, fallbackSink);
|
||||
Settings sinkSettings = settings.getAsSettings(settingPrefix);
|
||||
servers = sinkSettings.getAsList(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_HTTP_ENDPOINTS);
|
||||
if (servers == null || servers.size() == 0) {
|
||||
log.error("No http endpoints configured for external Elasticsearch endpoint '{}', falling back to localhost.", name);
|
||||
servers = Collections.singletonList("localhost:9200");
|
||||
}
|
||||
|
||||
this.index = sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_ES_INDEX, "'security-auditlog-'YYYY.MM.dd");
|
||||
|
||||
try {
|
||||
this.indexPattern = DateTimeFormat.forPattern(index);
|
||||
} catch (IllegalArgumentException e) {
|
||||
log.debug("Unable to parse index pattern due to {}. "
|
||||
+ "If you have no date pattern configured you can safely ignore this message", e.getMessage());
|
||||
}
|
||||
|
||||
this.type = sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_ES_TYPE, "auditlog");
|
||||
final boolean verifyHostnames = sinkSettings.getAsBoolean(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_VERIFY_HOSTNAMES, true);
|
||||
final boolean enableSsl = sinkSettings.getAsBoolean(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_ENABLE_SSL, false);
|
||||
final boolean enableSslClientAuth = sinkSettings.getAsBoolean(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_ENABLE_SSL_CLIENT_AUTH , ConfigConstants.OPENDISTRO_SECURITY_AUDIT_SSL_ENABLE_SSL_CLIENT_AUTH_DEFAULT);
|
||||
final String user = sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_USERNAME);
|
||||
final String password = sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_PASSWORD);
|
||||
|
||||
final HttpClientBuilder builder = HttpClient.builder(servers.toArray(new String[0]));
|
||||
|
||||
if (enableSsl) {
|
||||
|
||||
final boolean pem = sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_PEMTRUSTEDCAS_FILEPATH, null) != null
|
||||
|| sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_PEMTRUSTEDCAS_CONTENT, null) != null;
|
||||
|
||||
KeyStore effectiveTruststore;
|
||||
KeyStore effectiveKeystore;
|
||||
char[] effectiveKeyPassword;
|
||||
String effectiveKeyAlias;
|
||||
|
||||
if(pem) {
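// PEM mode: inline certificate/key content settings take precedence,
// file path settings are used as a fallback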
|
||||
X509Certificate[] trustCertificates = PemKeyReader.loadCertificatesFromStream(PemKeyReader.resolveStream(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_PEMTRUSTEDCAS_CONTENT, sinkSettings));
|
||||
|
||||
if(trustCertificates == null) {
|
||||
String path = sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_PEMTRUSTEDCAS_FILEPATH);
|
||||
trustCertificates = PemKeyReader.loadCertificatesFromFile(PemKeyReader.resolve(path, ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_PEMTRUSTEDCAS_FILEPATH, settings, configPath, true));
|
||||
}
|
||||
|
||||
//for client authentication
|
||||
X509Certificate[] authenticationCertificate = PemKeyReader.loadCertificatesFromStream(PemKeyReader.resolveStream(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_PEMCERT_CONTENT, sinkSettings));
|
||||
|
||||
if(authenticationCertificate == null) {
|
||||
String path = sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_PEMCERT_FILEPATH);
|
||||
authenticationCertificate = PemKeyReader.loadCertificatesFromFile(PemKeyReader.resolve(path, ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_PEMCERT_FILEPATH, settings, configPath, enableSslClientAuth));
|
||||
}
|
||||
|
||||
PrivateKey authenticationKey = PemKeyReader.loadKeyFromStream(sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_PEMKEY_PASSWORD), PemKeyReader.resolveStream(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_PEMKEY_CONTENT, sinkSettings));
|
||||
|
||||
if(authenticationKey == null) {
|
||||
String path = sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_PEMKEY_FILEPATH);
|
||||
authenticationKey = PemKeyReader.loadKeyFromFile(sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_PEMKEY_PASSWORD), PemKeyReader.resolve(path, ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_PEMKEY_FILEPATH, settings, configPath, enableSslClientAuth));
|
||||
}
|
||||
|
||||
effectiveKeyPassword = PemKeyReader.randomChars(12);
|
||||
effectiveKeyAlias = "al";
|
||||
effectiveTruststore = PemKeyReader.toTruststore(effectiveKeyAlias, trustCertificates);
|
||||
effectiveKeystore = PemKeyReader.toKeystore(effectiveKeyAlias, effectiveKeyPassword, authenticationCertificate, authenticationKey);
|
||||
|
||||
if(log.isDebugEnabled()) {
|
||||
log.debug("Use PEM to secure communication with auditlog server (client auth is {})", authenticationKey!=null);
|
||||
}
|
||||
|
||||
} else {
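// keystore mode: reuse the transport layer trust- and keystore settings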
|
||||
final KeyStore trustStore = PemKeyReader.loadKeyStore(PemKeyReader.resolve(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_TRUSTSTORE_FILEPATH, settings, configPath, true)
|
||||
, settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_TRUSTSTORE_PASSWORD, SSLConfigConstants.DEFAULT_STORE_PASSWORD)
|
||||
, settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_TRUSTSTORE_TYPE));
|
||||
|
||||
//for client authentication
|
||||
final KeyStore keyStore = PemKeyReader.loadKeyStore(PemKeyReader.resolve(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_KEYSTORE_FILEPATH, settings, configPath, enableSslClientAuth)
|
||||
, settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_KEYSTORE_PASSWORD, SSLConfigConstants.DEFAULT_STORE_PASSWORD)
|
||||
, settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_KEYSTORE_TYPE));
|
||||
final String keyStorePassword = settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_KEYSTORE_PASSWORD, SSLConfigConstants.DEFAULT_STORE_PASSWORD);
|
||||
effectiveKeyPassword = keyStorePassword==null||keyStorePassword.isEmpty()?null:keyStorePassword.toCharArray();
|
||||
effectiveKeyAlias = sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_JKS_CERT_ALIAS, null);
|
||||
|
||||
if(enableSslClientAuth && effectiveKeyAlias == null) {
|
||||
throw new IllegalArgumentException(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_JKS_CERT_ALIAS+" not given");
|
||||
}
|
||||
|
||||
effectiveTruststore = trustStore;
|
||||
effectiveKeystore = keyStore;
|
||||
|
||||
if(log.isDebugEnabled()) {
|
||||
log.debug("Use Trust-/Keystore to secure communication with LDAP server (client auth is {})", keyStore!=null);
|
||||
log.debug("keyStoreAlias: {}", effectiveKeyAlias);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
final List<String> enabledCipherSuites = sinkSettings.getAsList(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_ENABLED_SSL_CIPHERS, null);
|
||||
final List<String> enabledProtocols = sinkSettings.getAsList(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_EXTERNAL_ES_ENABLED_SSL_PROTOCOLS, DEFAULT_TLS_PROTOCOLS);
|
||||
|
||||
builder.setSupportedCipherSuites(enabledCipherSuites==null?null:enabledCipherSuites.toArray(new String[0]));
|
||||
builder.setSupportedProtocols(enabledProtocols.toArray(new String[0]));
|
||||
|
||||
builder.enableSsl(effectiveTruststore, verifyHostnames); //trust all aliases
|
||||
|
||||
if (enableSslClientAuth) {
|
||||
builder.setPkiCredentials(effectiveKeystore, effectiveKeyPassword, effectiveKeyAlias);
|
||||
}
|
||||
}
|
||||
|
||||
if (user != null && password != null) {
|
||||
builder.setBasicCredentials(user, password);
|
||||
}
|
||||
|
||||
client = builder.build();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
if (client != null) {
|
||||
client.close();
|
||||
}
|
||||
}
|
||||
|
||||
public boolean doStore(final AuditMessage msg) {
|
||||
try {
|
||||
boolean successful = client.index(msg.toString(), getExpandedIndexName(indexPattern, index), type, true);
|
||||
if (!successful) {
|
||||
log.error("Unable to send audit log {} to one of these servers: {}", msg, servers);
|
||||
}
|
||||
return successful;
|
||||
} catch (Exception e) {
|
||||
log.error("Unable to send audit log {} due to", msg, e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,86 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.auditlog.sink;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
|
||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext.StoredContext;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.joda.time.format.DateTimeFormat;
|
||||
import org.joda.time.format.DateTimeFormatter;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.impl.AuditMessage;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.HeaderHelper;
|
||||
|
||||
public final class InternalESSink extends AuditLogSink {
|
||||
|
||||
private final Client clientProvider;
|
||||
final String index;
|
||||
final String type;
|
||||
private DateTimeFormatter indexPattern;
|
||||
private final ThreadPool threadPool;
|
||||
|
||||
public InternalESSink(final String name, final Settings settings, final String settingsPrefix, final Path configPath, final Client clientProvider, ThreadPool threadPool, AuditLogSink fallbackSink) {
|
||||
super(name, settings, settingsPrefix, fallbackSink);
|
||||
this.clientProvider = clientProvider;
|
||||
Settings sinkSettings = getSinkSettings(settingsPrefix);
|
||||
|
||||
this.index = sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_ES_INDEX, "'security-auditlog-'YYYY.MM.dd");
|
||||
this.type = sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_ES_TYPE, "auditlog");
|
||||
|
||||
this.threadPool = threadPool;
|
||||
try {
|
||||
this.indexPattern = DateTimeFormat.forPattern(index);
|
||||
} catch (IllegalArgumentException e) {
|
||||
log.debug("Unable to parse index pattern due to {}. " + "If you have no date pattern configured you can safely ignore this message", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
|
||||
}
|
||||
|
||||
public boolean doStore(final AuditMessage msg) {
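// skip requests that were themselves triggered by audit logging (marked via the
// header set below) to avoid an endless audit-of-the-audit-log loop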
|
||||
|
||||
if (Boolean.parseBoolean((String) HeaderHelper.getSafeFromHeader(threadPool.getThreadContext(), ConfigConstants.OPENDISTRO_SECURITY_CONF_REQUEST_HEADER))) {
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("audit log of audit log will not be executed");
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
try (StoredContext ctx = threadPool.getThreadContext().stashContext()) {
|
||||
try {
|
||||
final IndexRequestBuilder irb = clientProvider.prepareIndex(getExpandedIndexName(indexPattern, index), type).setRefreshPolicy(RefreshPolicy.IMMEDIATE).setSource(msg.getAsMap());
|
||||
threadPool.getThreadContext().putHeader(ConfigConstants.OPENDISTRO_SECURITY_CONF_REQUEST_HEADER, "true");
|
||||
irb.setTimeout(TimeValue.timeValueMinutes(1));
|
||||
irb.execute().actionGet();
|
||||
return true;
|
||||
} catch (final Exception e) {
|
||||
log.error("Unable to index audit log {} due to {}", msg, e.toString(), e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,137 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.auditlog.sink;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.security.AccessController;
|
||||
import java.security.PrivilegedActionException;
|
||||
import java.security.PrivilegedExceptionAction;
|
||||
import java.util.Properties;
|
||||
|
||||
import org.apache.kafka.clients.producer.Callback;
|
||||
import org.apache.kafka.clients.producer.KafkaProducer;
|
||||
import org.apache.kafka.clients.producer.Producer;
|
||||
import org.apache.kafka.clients.producer.ProducerConfig;
|
||||
import org.apache.kafka.clients.producer.ProducerRecord;
|
||||
import org.apache.kafka.clients.producer.RecordMetadata;
|
||||
import org.apache.kafka.common.serialization.LongSerializer;
|
||||
import org.apache.kafka.common.serialization.StringSerializer;
|
||||
import org.elasticsearch.SpecialPermission;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.impl.AuditMessage;
|
||||
|
||||
public class KafkaSink extends AuditLogSink {
|
||||
|
||||
private final String[] mandatoryProperties = new String []{"bootstrap_servers","topic_name"};
|
||||
private boolean valid = true;
|
||||
private Producer<Long, String> producer;
|
||||
private String topicName;
|
||||
|
||||
public KafkaSink(final String name, final Settings settings, final String settingsPrefix, AuditLogSink fallbackSink) {
|
||||
super(name, settings, settingsPrefix, fallbackSink);
|
||||
|
||||
Settings sinkSettings = settings.getAsSettings(settingsPrefix);
|
||||
checkMandatorySinkSettings(sinkSettings);
|
||||
|
||||
if (!valid) {
|
||||
log.error("Failed to configure Kafka producer, please check the logfile.");
|
||||
return;
|
||||
}
|
||||
|
||||
final Properties producerProps = new Properties();
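// copy all sink settings except topic_name into the producer config, mapping '_' to '.'
// so that e.g. bootstrap_servers becomes the Kafka property bootstrap.servers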
|
||||
|
||||
for(String key: sinkSettings.names()) {
|
||||
if(!key.equals("topic_name")) {
|
||||
producerProps.put(key.replace('_', '.'), sinkSettings.get(key));
|
||||
}
|
||||
}
|
||||
|
||||
producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, LongSerializer.class.getName());
|
||||
producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
|
||||
topicName = sinkSettings.get("topic_name");
|
||||
|
||||
//map path of
|
||||
//ssl.keystore.location
|
||||
//ssl.truststore.location
|
||||
//sasl.kerberos.kinit.cmd
|
||||
|
||||
final SecurityManager sm = System.getSecurityManager();
|
||||
|
||||
if (sm != null) {
|
||||
sm.checkPermission(new SpecialPermission());
|
||||
}
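// creating the producer may need to access the filesystem (keystores, kerberos config),
// so it has to run with the plugin's own privileges under the ES SecurityManager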
|
||||
|
||||
try {
|
||||
this.producer = AccessController.doPrivileged(new PrivilegedExceptionAction<KafkaProducer<Long, String>>() {
|
||||
@Override
|
||||
public KafkaProducer<Long, String> run() throws Exception {
|
||||
return new KafkaProducer<Long, String>(producerProps);
|
||||
}
|
||||
});
|
||||
} catch (PrivilegedActionException e) {
|
||||
log.error("Failed to configure Kafka producer due to {}", e.getException(), e.getException());
|
||||
this.valid = false;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected boolean doStore(AuditMessage msg) {
|
||||
if (!valid || producer == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
ProducerRecord<Long, String> data = new ProducerRecord<Long, String>(topicName, msg.toJson());
|
||||
producer.send(data, new Callback() {
|
||||
|
||||
@Override
|
||||
public void onCompletion(RecordMetadata metadata, Exception exception) {
|
||||
if(exception == null) {
|
||||
// success - optionally a trace log could be added here
|
||||
} else {
|
||||
log.error("Could not store message on Kafka topic {}", topicName, exception);
|
||||
fallbackSink.store(msg);
|
||||
}
|
||||
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isHandlingBackpressure() {
|
||||
return true;
|
||||
}
|
||||
|
||||
private void checkMandatorySinkSettings(Settings sinkSettings) {
|
||||
for(String mandatory: mandatoryProperties) {
|
||||
String value = sinkSettings.get(mandatory);
|
||||
if (value == null || value.length() == 0) {
|
||||
log.error("No value for {} provided in configuration, this endpoint will not work.", value);
|
||||
this.valid = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
if(producer != null) {
|
||||
valid = false;
|
||||
producer.close();
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,52 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.auditlog.sink;
|
||||
|
||||
import org.apache.logging.log4j.Level;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.impl.AuditMessage;
|
||||
|
||||
public final class Log4JSink extends AuditLogSink {
|
||||
|
||||
final Logger auditLogger;
|
||||
final String loggerName;
|
||||
final Level logLevel;
|
||||
final boolean enabled;
|
||||
|
||||
public Log4JSink(final String name, final Settings settings, final String settingsPrefix, AuditLogSink fallbackSink) {
|
||||
super(name, settings, settingsPrefix, fallbackSink);
|
||||
loggerName = settings.get( settingsPrefix + ".log4j.logger_name","sgaudit");
|
||||
auditLogger = LogManager.getLogger(loggerName);
|
||||
logLevel = Level.toLevel(settings.get(settingsPrefix + ".log4j.level","INFO").toUpperCase());
|
||||
enabled = auditLogger.isEnabled(logLevel);
|
||||
}
|
||||
|
||||
public boolean isHandlingBackpressure() {
|
||||
return !enabled; //no submit to thread pool if not enabled
|
||||
}
|
||||
|
||||
|
||||
public boolean doStore(final AuditMessage msg) {
|
||||
if(enabled) {
|
||||
auditLogger.log(logLevel, msg.toJson());
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,39 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.auditlog.sink;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.impl.AuditMessage;
|
||||
|
||||
public final class NoopSink extends AuditLogSink {
|
||||
|
||||
public NoopSink(String name, Settings settings, AuditLogSink fallbackSink) {
|
||||
super(name, settings, null, fallbackSink);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isHandlingBackpressure() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean doStore(final AuditMessage msg) {
|
||||
//do nothing
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,177 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.auditlog.sink;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.support.Utils;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
|
||||
public class SinkProvider {
|
||||
|
||||
protected final Logger log = LogManager.getLogger(this.getClass());
|
||||
private static final String FALLBACKSINK_NAME = "fallback";
|
||||
private static final String DEFAULTSINK_NAME = "default";
|
||||
private final Client clientProvider;
|
||||
private final ThreadPool threadPool;
|
||||
private final Path configPath;
|
||||
private final Settings settings;
|
||||
final Map<String, AuditLogSink> allSinks = new HashMap<>();
|
||||
AuditLogSink defaultSink;
|
||||
AuditLogSink fallbackSink;
|
||||
|
||||
public SinkProvider(final Settings settings, final Client clientProvider, ThreadPool threadPool, final Path configPath) {
|
||||
this.settings = settings;
|
||||
this.clientProvider = clientProvider;
|
||||
this.threadPool = threadPool;
|
||||
this.configPath = configPath;
|
||||
|
||||
// fall back sink, make sure we don't lose messages
|
||||
String fallbackConfigPrefix = ConfigConstants.OPENDISTRO_SECURITY_AUDIT_CONFIG_ENDPOINTS + "." + FALLBACKSINK_NAME;
|
||||
Settings fallbackSinkSettings = settings.getAsSettings(fallbackConfigPrefix);
|
||||
if(!fallbackSinkSettings.isEmpty()) {
|
||||
this.fallbackSink = createSink(FALLBACKSINK_NAME, fallbackSinkSettings.get("type"), settings, fallbackConfigPrefix+".config");
|
||||
}
|
||||
|
||||
// make sure we always have a fallback to write to
|
||||
if (this.fallbackSink == null) {
|
||||
this.fallbackSink = new DebugSink(FALLBACKSINK_NAME, settings, null);
|
||||
}
|
||||
|
||||
allSinks.put(FALLBACKSINK_NAME, this.fallbackSink);
|
||||
|
||||
// create default sink
|
||||
defaultSink = this.createSink(DEFAULTSINK_NAME, settings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_TYPE_DEFAULT), settings, ConfigConstants.OPENDISTRO_SECURITY_AUDIT_CONFIG_DEFAULT);
|
||||
if (defaultSink == null) {
|
||||
log.error("Default endpoint could not be created, auditlog will not work properly.");
|
||||
return;
|
||||
}
|
||||
|
||||
allSinks.put(DEFAULTSINK_NAME, defaultSink);
|
||||
|
||||
// create all other sinks
|
||||
Map<String, Object> sinkSettingsMap = Utils.convertJsonToxToStructuredMap(settings.getAsSettings(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_CONFIG_ENDPOINTS));
|
||||
|
||||
for (Entry<String, Object> sinkEntry : sinkSettingsMap.entrySet()) {
|
||||
String sinkName = sinkEntry.getKey();
|
||||
// do not create fallback twice
|
||||
if(sinkName.equalsIgnoreCase(FALLBACKSINK_NAME)) {
|
||||
continue;
|
||||
}
|
||||
String type = settings.getAsSettings(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_CONFIG_ENDPOINTS + "." + sinkName).get("type");
|
||||
if (type == null) {
|
||||
log.error("No type defined for endpoint {}.", sinkName);
|
||||
continue;
|
||||
}
|
||||
AuditLogSink sink = createSink(sinkName, type, this.settings, ConfigConstants.OPENDISTRO_SECURITY_AUDIT_CONFIG_ENDPOINTS + "." + sinkName + ".config");
|
||||
if (sink == null) {
|
||||
log.error("Endpoint '{}' could not be created, check log file for further information.", sinkName);
|
||||
continue;
|
||||
}
|
||||
allSinks.put(sinkName.toLowerCase(), sink);
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("sink '{}' created successfully.", sinkName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public AuditLogSink getSink(String sinkName) {
|
||||
return allSinks.get(sinkName.toLowerCase());
|
||||
}
|
||||
|
||||
public AuditLogSink getDefaultSink() {
|
||||
return defaultSink;
|
||||
}
|
||||
|
||||
public void close() {
|
||||
for (AuditLogSink sink : allSinks.values()) {
|
||||
close(sink);
|
||||
}
|
||||
}
|
||||
|
||||
protected void close(AuditLogSink sink) {
|
||||
try {
|
||||
log.info("Closing {}", sink.getClass().getSimpleName());
|
||||
sink.close();
|
||||
} catch (Exception ex) {
|
||||
log.info("Could not close sink '{}' due to '{}'", sink.getClass().getSimpleName(), ex.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private final AuditLogSink createSink(final String name, final String type, final Settings settings, final String settingsPrefix) {
|
||||
AuditLogSink sink = null;
|
||||
if (type != null) {
|
||||
switch (type.toLowerCase()) {
|
||||
case "internal_elasticsearch":
|
||||
sink = new InternalESSink(name, settings, settingsPrefix, configPath, clientProvider, threadPool, fallbackSink);
|
||||
break;
|
||||
case "external_elasticsearch":
|
||||
try {
|
||||
sink = new ExternalESSink(name, settings, settingsPrefix, configPath, fallbackSink);
|
||||
} catch (Exception e) {
|
||||
log.error("Audit logging unavailable: Unable to setup HttpESAuditLog due to", e);
|
||||
}
|
||||
break;
|
||||
case "webhook":
|
||||
try {
|
||||
sink = new WebhookSink(name, settings, settingsPrefix, configPath, fallbackSink);
|
||||
} catch (Exception e1) {
|
||||
log.error("Audit logging unavailable: Unable to setup WebhookAuditLog due to", e1);
|
||||
}
|
||||
break;
|
||||
case "debug":
|
||||
sink = new DebugSink(name, settings, fallbackSink);
|
||||
break;
|
||||
case "noop":
|
||||
sink = new NoopSink(name, settings, fallbackSink);
|
||||
break;
|
||||
case "log4j":
|
||||
sink = new Log4JSink(name, settings, settingsPrefix, fallbackSink);
|
||||
break;
|
||||
case "kafka":
|
||||
sink = new KafkaSink(name, settings, settingsPrefix, fallbackSink);
|
||||
break;
|
||||
default:
|
||||
try {
|
||||
Class<?> delegateClass = Class.forName(type);
|
||||
if (AuditLogSink.class.isAssignableFrom(delegateClass)) {
|
||||
try {
|
||||
sink = (AuditLogSink) delegateClass.getConstructor(String.class, Settings.class, String.class, Path.class, Client.class, ThreadPool.class, AuditLogSink.class).newInstance(name, settings, settingsPrefix, configPath,
|
||||
clientProvider, threadPool, fallbackSink);
|
||||
} catch (Throwable e) {
|
||||
sink = (AuditLogSink) delegateClass.getConstructor(String.class, Settings.class, String.class, AuditLogSink.class).newInstance(name, settings, settingsPrefix, fallbackSink);
|
||||
}
|
||||
} else {
|
||||
log.error("Audit logging unavailable: '{}' is not a subclass of {}", type, AuditLogSink.class.getSimpleName());
|
||||
}
|
||||
} catch (Throwable e) { // we need really catch a Throwable here!
|
||||
log.error("Audit logging unavailable: Cannot instantiate object of class {} due to ", type, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
return sink;
|
||||
}
|
||||
|
||||
}
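// Illustrative only: the default branch of createSink() above resolves an unknown "type" as a
// fully qualified class name, so a custom sink can be plugged in without touching this class.
// Minimal sketch of such a subclass (assumes AuditLogSink, not shown here, only requires
// doStore() to be implemented):
public final class MyCustomSink extends AuditLogSink {
    // matches the (String, Settings, String, AuditLogSink) constructor tried by createSink()
    public MyCustomSink(String name, Settings settings, String settingsPrefix, AuditLogSink fallbackSink) {
        super(name, settings, settingsPrefix, fallbackSink);
    }
    @Override
    public boolean doStore(AuditMessage msg) {
        // hand the audit event to the external system of choice
        return true;
    }
}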
|
@ -0,0 +1,424 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.auditlog.sink;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Path;
|
||||
import java.security.AccessController;
|
||||
import java.security.KeyStore;
|
||||
import java.security.PrivilegedAction;
|
||||
import java.security.cert.X509Certificate;
|
||||
|
||||
import org.apache.http.HttpStatus;
|
||||
import org.apache.http.client.config.RequestConfig;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.apache.http.client.methods.HttpPost;
|
||||
import org.apache.http.conn.ssl.DefaultHostnameVerifier;
|
||||
import org.apache.http.conn.ssl.NoopHostnameVerifier;
|
||||
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
|
||||
import org.apache.http.entity.ContentType;
|
||||
import org.apache.http.entity.StringEntity;
|
||||
import org.apache.http.impl.client.CloseableHttpClient;
|
||||
import org.apache.http.impl.client.HttpClients;
|
||||
import org.apache.http.ssl.SSLContextBuilder;
|
||||
import org.apache.http.ssl.TrustStrategy;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.impl.AuditMessage;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.util.SSLConfigConstants;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.PemKeyReader;
|
||||
|
||||
public class WebhookSink extends AuditLogSink {
|
||||
|
||||
/* HttpClient is thread safe */
|
||||
private final CloseableHttpClient httpClient;
|
||||
|
||||
String webhookUrl = null;
|
||||
WebhookFormat webhookFormat = null;
|
||||
final boolean verifySSL;
|
||||
final KeyStore effectiveTruststore;
|
||||
|
||||
public WebhookSink(final String name, final Settings settings, final String settingsPrefix, final Path configPath, AuditLogSink fallbackSink) throws Exception {
|
||||
super(name, settings, settingsPrefix, fallbackSink);
|
||||
|
||||
Settings sinkSettings = settings.getAsSettings(settingsPrefix);
|
||||
|
||||
this.effectiveTruststore = getEffectiveKeyStore(configPath);
|
||||
|
||||
final String webhookUrl = sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_WEBHOOK_URL);
|
||||
final String format = sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_WEBHOOK_FORMAT);
|
||||
|
||||
verifySSL = sinkSettings.getAsBoolean(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_WEBHOOK_SSL_VERIFY, true);
|
||||
httpClient = getHttpClient();
|
||||
|
||||
if(httpClient == null) {
|
||||
log.error("Could not create HttpClient, audit log not available.");
|
||||
return;
|
||||
}
|
||||
|
||||
if (Strings.isEmpty(webhookUrl)) {
|
||||
log.error("opendistro_security.audit.config.webhook.url not provided, webhook audit log will not work");
|
||||
return;
|
||||
} else {
|
||||
try {
|
||||
// Sanity - check URL validity
|
||||
new URL(webhookUrl);
|
||||
this.webhookUrl = webhookUrl;
|
||||
} catch (MalformedURLException ex) {
|
||||
log.error("URL {} is invalid, webhook audit log will not work.", webhookUrl, ex);
|
||||
}
|
||||
}
|
||||
|
||||
if (Strings.isEmpty(format)) {
|
||||
log.warn("opendistro_security.audit.config.webhook.format not provided, falling back to 'text'");
|
||||
webhookFormat = WebhookFormat.TEXT;
|
||||
} else {
|
||||
try {
|
||||
webhookFormat = WebhookFormat.valueOf(format.toUpperCase());
|
||||
} catch (Exception ex) {
|
||||
log.error("Could not find WebhookFormat for type {}, falling back to 'text'", format, ex);
|
||||
webhookFormat = WebhookFormat.TEXT;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean doStore(AuditMessage msg) {
|
||||
if (Strings.isEmpty(webhookUrl)) {
|
||||
log.debug("Webhook URL is null");
|
||||
return false;
|
||||
}
|
||||
if (msg == null) {
|
||||
log.debug("Message is null");
|
||||
return true;
|
||||
}
|
||||
|
||||
return AccessController.doPrivileged(new PrivilegedAction<Boolean>() {
|
||||
|
||||
@Override
|
||||
public Boolean run() {
|
||||
boolean success = false;
|
||||
try {
|
||||
switch (webhookFormat.method) {
|
||||
case POST:
|
||||
success = post(msg);
|
||||
break;
|
||||
case GET:
|
||||
success = get(msg);
|
||||
break;
|
||||
default:
|
||||
log.error("Http Method '{}' defined in WebhookFormat '{}' not implemented yet", webhookFormat.method.name(),
|
||||
webhookFormat.name());
|
||||
}
|
||||
// log something in case endpoint is not reachable or did not return 200
|
||||
if (!success) {
|
||||
log.error(msg.toString());
|
||||
}
|
||||
return success;
|
||||
} catch(Throwable t) {
|
||||
log.error("Uncaught exception while trying to log message.", t);
|
||||
log.error(msg.toString());
|
||||
return false;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
if(httpClient != null) {
|
||||
httpClient.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Transforms an {@link AuditMessage} to JSON. By default, all fields are
|
||||
* included in the JSON string. This method can be overridden by subclasses
|
||||
* if a specific JSON format is needed.
|
||||
*
|
||||
* @param msg the AuditMessage to transform
|
||||
* @return the JSON string
|
||||
*/
|
||||
protected String formatJson(final AuditMessage msg) {
|
||||
return msg.toJson();
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms an {@link AuditMessage} to plain text. This method can be overridden
|
||||
* by subclasses if a specific text format is needed.
|
||||
*
|
||||
* @param msg the AuditMessage to transform
|
||||
* @return the text string
|
||||
*/
|
||||
protected String formatText(AuditMessage msg) {
|
||||
return msg.toText();
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms an {@link AuditMessage} to Slack format.
|
||||
* The default implementation returns
|
||||
* <p><blockquote><pre>
|
||||
* {
|
||||
* "text": "<AuditMessage#toText>"
|
||||
* }
|
||||
* </pre></blockquote>
|
||||
* <p>
|
||||
* Can be overridden by subclasses if a more specific format is needed.
|
||||
*
|
||||
* @param msg the AuditMessage to transform
|
||||
* @return the Slack formatted JSON string
|
||||
*/
|
||||
protected String formatSlack(AuditMessage msg) {
|
||||
return "{\"text\": \"" + msg.toText() + "\"}";
|
||||
}
|
||||
|
||||
/**
|
||||
* Transforms an {@link AuditMessage} to a query parameter String.
|
||||
* Used by {@link WebhookFormat#URL_PARAMETER_GET} and
|
||||
* Used by {@link WebhookFormat#URL_PARAMETER_POST}. Can be overridden by
|
||||
* subclasses if a specific format is needed.
|
||||
*
|
||||
* @param msg the AuditMessage to transform
|
||||
* @return the query parameter string
|
||||
*/
|
||||
protected String formatUrlParameters(AuditMessage msg) {
|
||||
return msg.toUrlParameters();
|
||||
}
|
||||
|
||||
boolean get(AuditMessage msg) {
|
||||
switch (webhookFormat) {
|
||||
case URL_PARAMETER_GET:
|
||||
return doGet(webhookUrl + formatUrlParameters(msg));
|
||||
default:
|
||||
log.error("WebhookFormat '{}' not implemented yet", webhookFormat.name());
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
protected boolean doGet(String url) {
|
||||
HttpGet httpGet = new HttpGet(url);
|
||||
CloseableHttpResponse serverResponse = null;
|
||||
try {
|
||||
serverResponse = httpClient.execute(httpGet);
|
||||
int responseCode = serverResponse.getStatusLine().getStatusCode();
|
||||
if (responseCode != HttpStatus.SC_OK) {
|
||||
log.error("Cannot GET to webhook URL '{}', server returned status {}", webhookUrl, responseCode);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
} catch (Throwable e) {
|
||||
log.error("Cannot GET to webhook URL '{}'", webhookUrl, e);
|
||||
return false;
|
||||
} finally {
|
||||
try {
|
||||
if (serverResponse != null) {
|
||||
serverResponse.close();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
log.error("Cannot close server response", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
boolean post(AuditMessage msg) {
|
||||
|
||||
String payload;
|
||||
String url = webhookUrl;
|
||||
|
||||
switch (webhookFormat) {
|
||||
case JSON:
|
||||
payload = formatJson(msg);
|
||||
break;
|
||||
case TEXT:
|
||||
payload = formatText(msg);
|
||||
break;
|
||||
case SLACK:
|
||||
payload = "{\"text\": \"" + msg.toText() + "\"}";
|
||||
break;
|
||||
case URL_PARAMETER_POST:
|
||||
payload = "";
|
||||
url = webhookUrl + formatUrlParameters(msg);
|
||||
break;
|
||||
default:
|
||||
log.error("WebhookFormat '{}' not implemented yet", webhookFormat.name());
|
||||
return false;
|
||||
}
|
||||
|
||||
return doPost(url, payload);
|
||||
|
||||
}
|
||||
|
||||
protected boolean doPost(String url, String payload) {
|
||||
|
||||
HttpPost postRequest = new HttpPost(url);
|
||||
|
||||
StringEntity input = new StringEntity(payload, StandardCharsets.UTF_8);
|
||||
input.setContentType(webhookFormat.contentType.toString());
|
||||
postRequest.setEntity(input);
|
||||
|
||||
CloseableHttpResponse serverResponse = null;
|
||||
try {
|
||||
serverResponse = httpClient.execute(postRequest);
|
||||
int responseCode = serverResponse.getStatusLine().getStatusCode();
|
||||
if (responseCode != HttpStatus.SC_OK) {
|
||||
log.error("Cannot POST to webhook URL '{}', server returned status {}", webhookUrl, responseCode);
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
} catch (Throwable e) {
|
||||
log.error("Cannot POST to webhook URL '{}' due to '{}'", webhookUrl, e.getMessage(), e);
|
||||
return false;
|
||||
} finally {
|
||||
try {
|
||||
if (serverResponse != null) {
|
||||
serverResponse.close();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
log.error("Cannot close server response", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private KeyStore getEffectiveKeyStore(final Path configPath) {
|
||||
|
||||
return AccessController.doPrivileged(new PrivilegedAction<KeyStore>() {
|
||||
|
||||
@Override
|
||||
public KeyStore run() {
|
||||
try {
|
||||
Settings sinkSettings = settings.getAsSettings(settingsPrefix);
|
||||
|
||||
final boolean pem = sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_WEBHOOK_PEMTRUSTEDCAS_FILEPATH, null) != null
|
||||
|| sinkSettings.get(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_WEBHOOK_PEMTRUSTEDCAS_CONTENT, null) != null;
|
||||
|
||||
if(pem) {
|
||||
X509Certificate[] trustCertificates = PemKeyReader.loadCertificatesFromStream(PemKeyReader.resolveStream(ConfigConstants.OPENDISTRO_SECURITY_AUDIT_WEBHOOK_PEMTRUSTEDCAS_CONTENT, sinkSettings));
|
||||
|
||||
if(trustCertificates == null) {
|
||||
String fullPath = settingsPrefix + "." + ConfigConstants.OPENDISTRO_SECURITY_AUDIT_WEBHOOK_PEMTRUSTEDCAS_FILEPATH;
|
||||
trustCertificates = PemKeyReader.loadCertificatesFromFile(PemKeyReader.resolve(fullPath, settings, configPath, false));
|
||||
}
|
||||
|
||||
return PemKeyReader.toTruststore("alw", trustCertificates);
|
||||
|
||||
|
||||
} else {
|
||||
return PemKeyReader.loadKeyStore(PemKeyReader.resolve(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_TRUSTSTORE_FILEPATH, settings, configPath, false)
|
||||
, settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_TRUSTSTORE_PASSWORD, SSLConfigConstants.DEFAULT_STORE_PASSWORD)
|
||||
, settings.get(SSLConfigConstants.OPENDISTRO_SECURITY_SSL_TRANSPORT_TRUSTSTORE_TYPE));
|
||||
}
|
||||
} catch(Exception ex) {
|
||||
log.error("Could not load key material. Make sure your certificates are located relative to the config directory", ex);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
CloseableHttpClient getHttpClient() {
|
||||
|
||||
// TODO: set a timeout until we have a proper way to deal with back pressure
|
||||
int timeout = 5;
|
||||
|
||||
RequestConfig config = RequestConfig.custom()
|
||||
.setConnectTimeout(timeout * 1000)
|
||||
.setConnectionRequestTimeout(timeout * 1000)
|
||||
.setSocketTimeout(timeout * 1000).build();
|
||||
|
||||
final TrustStrategy trustAllStrategy = new TrustStrategy() {
|
||||
@Override
|
||||
public boolean isTrusted(X509Certificate[] chain, String authType) {
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
try {
|
||||
|
||||
if(!verifySSL) {
|
||||
return HttpClients.custom()
|
||||
.setSSLSocketFactory(
|
||||
new SSLConnectionSocketFactory(
|
||||
new SSLContextBuilder()
|
||||
.loadTrustMaterial(trustAllStrategy)
|
||||
.build(),
|
||||
NoopHostnameVerifier.INSTANCE))
|
||||
.setDefaultRequestConfig(config)
|
||||
.build();
|
||||
}
|
||||
|
||||
if(effectiveTruststore == null) {
|
||||
return HttpClients.custom()
|
||||
.setDefaultRequestConfig(config)
|
||||
.build();
|
||||
}
|
||||
|
||||
return HttpClients.custom()
|
||||
.setSSLSocketFactory(
|
||||
new SSLConnectionSocketFactory(
|
||||
new SSLContextBuilder()
|
||||
.loadTrustMaterial(effectiveTruststore, null)
|
||||
.build(),
|
||||
new DefaultHostnameVerifier()))
|
||||
.setDefaultRequestConfig(config)
|
||||
.build();
|
||||
|
||||
|
||||
} catch(Exception ex) {
|
||||
log.error("Could not create HTTPClient due to {}, audit log not available.", ex.getMessage(), ex);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public static enum WebhookFormat {
|
||||
URL_PARAMETER_GET(HttpMethod.GET, ContentType.TEXT_PLAIN),
|
||||
URL_PARAMETER_POST(HttpMethod.POST, ContentType.TEXT_PLAIN),
|
||||
TEXT(HttpMethod.POST, ContentType.TEXT_PLAIN),
|
||||
JSON(HttpMethod.POST, ContentType.APPLICATION_JSON),
|
||||
SLACK(HttpMethod.POST, ContentType.APPLICATION_JSON);
|
||||
|
||||
private HttpMethod method;
|
||||
private ContentType contentType;
|
||||
|
||||
private WebhookFormat(HttpMethod method, ContentType contentType) {
|
||||
this.method = method;
|
||||
this.contentType = contentType;
|
||||
}
|
||||
|
||||
HttpMethod getMethod() {
|
||||
return method;
|
||||
}
|
||||
|
||||
ContentType getContentType() {
|
||||
return contentType;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
private static enum HttpMethod {
|
||||
GET,
|
||||
POST;
|
||||
}
|
||||
|
||||
|
||||
}
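// Illustrative only: formatJson(), formatText() and formatSlack() are documented above as
// override points, so a custom payload can be produced by subclassing. Minimal sketch:
public class PrefixedSlackSink extends WebhookSink {
    public PrefixedSlackSink(String name, Settings settings, String settingsPrefix,
            java.nio.file.Path configPath, AuditLogSink fallbackSink) throws Exception {
        super(name, settings, settingsPrefix, configPath, fallbackSink);
    }
    @Override
    protected String formatSlack(AuditMessage msg) {
        // prepend a marker to every Slack notification; toText() is what the default payload uses
        return "{\"text\": \"[audit] " + msg.toText() + "\"}";
    }
}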
|
@ -0,0 +1,171 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.compliance;
|
||||
|
||||
import java.util.Objects;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.engine.Engine.Delete;
|
||||
import org.elasticsearch.index.engine.Engine.DeleteResult;
|
||||
import org.elasticsearch.index.engine.Engine.Index;
|
||||
import org.elasticsearch.index.engine.Engine.IndexResult;
|
||||
import org.elasticsearch.index.get.GetResult;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.compliance.ComplianceConfig;
|
||||
import com.amazon.opendistroforelasticsearch.security.compliance.ComplianceIndexingOperationListener;
|
||||
|
||||
public final class ComplianceIndexingOperationListenerImpl extends ComplianceIndexingOperationListener {
|
||||
|
||||
private static final Logger log = LogManager.getLogger(ComplianceIndexingOperationListenerImpl.class);
|
||||
private final ComplianceConfig complianceConfig;
|
||||
private final AuditLog auditlog;
|
||||
private volatile IndexService is;
|
||||
|
||||
public ComplianceIndexingOperationListenerImpl(final ComplianceConfig complianceConfig, final AuditLog auditlog) {
|
||||
super();
|
||||
this.complianceConfig = complianceConfig;
|
||||
this.auditlog = auditlog;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setIs(final IndexService is) {
|
||||
if(this.is != null) {
|
||||
throw new ElasticsearchException("Index service already set");
|
||||
}
|
||||
this.is = is;
|
||||
}
|
||||
|
||||
private static final class Context {
|
||||
private final GetResult getResult;
|
||||
|
||||
public Context(GetResult getResult) {
|
||||
super();
|
||||
this.getResult = getResult;
|
||||
}
|
||||
|
||||
public GetResult getGetResult() {
|
||||
return getResult;
|
||||
}
|
||||
}
|
||||
|
||||
private static final ThreadLocal<Context> threadContext = new ThreadLocal<Context>();
|
||||
|
||||
@Override
|
||||
public void postDelete(final ShardId shardId, final Delete delete, final DeleteResult result) {
|
||||
if(complianceConfig.isEnabled()) {
|
||||
Objects.requireNonNull(is);
|
||||
if(result.getFailure() == null && result.isFound() && delete.origin() == org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY) {
|
||||
auditlog.logDocumentDeleted(shardId, delete, result);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Index preIndex(final ShardId shardId, final Index index) {
|
||||
if(complianceConfig.isEnabled() && complianceConfig.logDiffsForWrite()) {
|
||||
Objects.requireNonNull(is);
|
||||
|
||||
final IndexShard shard;
|
||||
|
||||
if (index.origin() != org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY) {
|
||||
return index;
|
||||
}
|
||||
|
||||
if((shard = is.getShardOrNull(shardId.getId())) == null) {
|
||||
return index;
|
||||
}
|
||||
|
||||
if (shard.isReadAllowed()) {
|
||||
try {
|
||||
|
||||
final GetResult getResult = shard.getService().getForUpdate(index.type(), index.id(),
|
||||
index.version(), index.versionType());
|
||||
|
||||
if (getResult.isExists()) {
|
||||
threadContext.set(new Context(getResult));
|
||||
} else {
|
||||
threadContext.set(new Context(null));
|
||||
}
|
||||
} catch (Exception e) {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Cannot retrieve original document due to {}", e.toString());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("Cannot read from shard {}", shardId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return index;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void postIndex(final ShardId shardId, final Index index, final Exception ex) {
|
||||
if(complianceConfig.isEnabled() && complianceConfig.logDiffsForWrite()) {
|
||||
threadContext.remove();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void postIndex(ShardId shardId, Index index, IndexResult result) {
|
||||
if(complianceConfig.isEnabled() && complianceConfig.logDiffsForWrite()) {
|
||||
final Context context = threadContext.get();
|
||||
final GetResult previousContent = context==null?null:context.getGetResult();
|
||||
threadContext.remove();
|
||||
Objects.requireNonNull(is);
|
||||
|
||||
if (result.getFailure() != null || index.origin() != org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY) {
|
||||
return;
|
||||
}
|
||||
|
||||
if(is.getShardOrNull(shardId.getId()) == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
if(previousContent == null) {
|
||||
//no previous content
|
||||
if(!result.isCreated()) {
|
||||
log.warn("No previous content and not created (its an update but do not find orig source) for {}", index.startTime()+"/"+shardId+"/"+index.type()+"/"+index.id());
|
||||
}
|
||||
assert result.isCreated():"No previous content and not created";
|
||||
} else {
|
||||
if(result.isCreated()) {
|
||||
log.warn("Previous content and created for {}",index.startTime()+"/"+shardId+"/"+index.type()+"/"+index.id());
|
||||
}
|
||||
assert !result.isCreated():"Previous content and created";
|
||||
}
|
||||
|
||||
auditlog.logDocumentWritten(shardId, previousContent, index, result, complianceConfig);
|
||||
} else if (complianceConfig.isEnabled()) {
|
||||
//no diffs
|
||||
if (result.getFailure() != null || index.origin() != org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY) {
|
||||
return;
|
||||
}
|
||||
|
||||
auditlog.logDocumentWritten(shardId, null, index, result, complianceConfig);
|
||||
}
|
||||
}
|
||||
|
||||
}
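// Illustrative only: a minimal wiring sketch for the listener above. setIs() may be called
// exactly once; a second call throws an ElasticsearchException. The searcher wrapper further
// below performs this call via ciol.setIs(indexService).
ComplianceIndexingOperationListenerImpl listener =
        new ComplianceIndexingOperationListenerImpl(complianceConfig, auditLog); // both assumed available
listener.setIs(indexService); // indexService assumed available from the index module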
|
@ -0,0 +1,246 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.compliance;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
import java.util.function.Function;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.lucene.index.FieldInfo;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.mapper.Uid;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.compliance.ComplianceConfig;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.HeaderHelper;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.SourceFieldsContext;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.WildcardMatcher;
|
||||
import com.github.wnameless.json.flattener.JsonFlattener;
|
||||
|
||||
//TODO We need to deal with caching!!
|
||||
//Currently we disable caching (and realtime requests) when FLS or DLS is applied
|
||||
//Check if we can hook in into the caches
|
||||
|
||||
//stored fields are already done here
|
||||
|
||||
public final class FieldReadCallback {
|
||||
|
||||
private static final Logger log = LogManager.getLogger(FieldReadCallback.class);
|
||||
//private final ThreadContext threadContext;
|
||||
//private final ClusterService clusterService;
|
||||
private final Index index;
|
||||
private final ComplianceConfig complianceConfig;
|
||||
private final Set<String> maskedFields;
|
||||
private final AuditLog auditLog;
|
||||
private Function<Map<String, ?>, Map<String, Object>> filterFunction;
|
||||
private SourceFieldsContext sfc;
|
||||
private Doc doc;
|
||||
private final ShardId shardId;
|
||||
|
||||
public FieldReadCallback(final ThreadContext threadContext, final IndexService indexService,
|
||||
final ClusterService clusterService, final ComplianceConfig complianceConfig, final AuditLog auditLog,
|
||||
final Set<String> maskedFields, ShardId shardId) {
|
||||
super();
|
||||
//this.threadContext = Objects.requireNonNull(threadContext);
|
||||
//this.clusterService = Objects.requireNonNull(clusterService);
|
||||
this.index = Objects.requireNonNull(indexService).index();
|
||||
this.complianceConfig = complianceConfig;
|
||||
this.auditLog = auditLog;
|
||||
this.maskedFields = maskedFields;
|
||||
this.shardId = shardId;
|
||||
try {
|
||||
sfc = (SourceFieldsContext) HeaderHelper.deserializeSafeFromHeader(threadContext, "_opendistro_security_source_field_context");
|
||||
if(sfc != null && sfc.hasIncludesOrExcludes()) {
|
||||
if(log.isTraceEnabled()) {
|
||||
log.trace("_opendistro_security_source_field_context: "+sfc);
|
||||
}
|
||||
|
||||
filterFunction = XContentMapValues.filter(sfc.getIncludes(), sfc.getExcludes());
|
||||
}
|
||||
} catch (Exception e) {
|
||||
if(log.isDebugEnabled()) {
|
||||
log.debug("Cannot deserialize _opendistro_security_source_field_context because of {}", e.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private boolean recordField(final String fieldName, boolean isStringField) {
|
||||
boolean masked = false;
|
||||
if(isStringField && maskedFields != null && maskedFields.size() > 0) {
|
||||
masked = WildcardMatcher.matchAny(maskedFields, fieldName);
|
||||
}
|
||||
return !masked && complianceConfig.readHistoryEnabledForField(index.getName(), fieldName);
|
||||
}
|
||||
|
||||
public void binaryFieldRead(final FieldInfo fieldInfo, byte[] fieldValue) {
|
||||
try {
|
||||
if(!recordField(fieldInfo.name, false) && !fieldInfo.name.equals("_source") && !fieldInfo.name.equals("_id")) {
|
||||
return;
|
||||
}
|
||||
|
||||
if(fieldInfo.name.equals("_source")) {
|
||||
|
||||
if(filterFunction != null) {
|
||||
final BytesReference bytesRef = new BytesArray(fieldValue);
|
||||
final Tuple<XContentType, Map<String, Object>> bytesRefTuple = XContentHelper.convertToMap(bytesRef, false, XContentType.JSON);
|
||||
Map<String, Object> filteredSource = bytesRefTuple.v2();
|
||||
|
||||
//if (!canOptimize) {
|
||||
filteredSource = filterFunction.apply(bytesRefTuple.v2());
|
||||
/*} else {
|
||||
if (!excludesSet.isEmpty()) {
|
||||
filteredSource.keySet().removeAll(excludesSet);
|
||||
} else {
|
||||
filteredSource.keySet().retainAll(includesSet);
|
||||
}
|
||||
}*/
|
||||
|
||||
final XContentBuilder xBuilder = XContentBuilder.builder(bytesRefTuple.v1().xContent()).map(filteredSource);
|
||||
fieldValue = BytesReference.toBytes(BytesReference.bytes(xBuilder));
|
||||
}
|
||||
|
||||
Map<String, Object> filteredSource = new JsonFlattener(new String(fieldValue, StandardCharsets.UTF_8)).flattenAsMap();
|
||||
for(String k: filteredSource.keySet()) {
|
||||
if(!recordField(k, filteredSource.get(k) instanceof String)) {
|
||||
continue;
|
||||
}
|
||||
fieldRead0(k, filteredSource.get(k));
|
||||
}
|
||||
} else if (fieldInfo.name.equals("_id")) {
|
||||
fieldRead0(fieldInfo.name, Uid.decodeId(fieldValue));
|
||||
} else {
|
||||
fieldRead0(fieldInfo.name, new String(fieldValue, StandardCharsets.UTF_8));
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error("Unexpected error reading binary field '{}' in index '{}'", fieldInfo.name, index.getName());
|
||||
}
|
||||
}
|
||||
|
||||
public void stringFieldRead(final FieldInfo fieldInfo, final byte[] fieldValue) {
|
||||
try {
|
||||
if(!recordField(fieldInfo.name, true)) {
|
||||
return;
|
||||
}
|
||||
fieldRead0(fieldInfo.name, new String(fieldValue, StandardCharsets.UTF_8));
|
||||
} catch (Exception e) {
|
||||
log.error("Unexpected error reading string field '{}' in index '{}'", fieldInfo.name, index.getName());
|
||||
}
|
||||
}
|
||||
|
||||
public void numericFieldRead(final FieldInfo fieldInfo, final Number fieldValue) {
|
||||
try {
|
||||
if(!recordField(fieldInfo.name, false)) {
|
||||
return;
|
||||
}
|
||||
fieldRead0(fieldInfo.name, fieldValue);
|
||||
} catch (Exception e) {
|
||||
log.error("Unexpected error reading numeric field '{}' in index '{}'", fieldInfo.name, index.getName());
|
||||
}
|
||||
}
|
||||
|
||||
private void fieldRead0(final String fieldName, final Object fieldValue) {
|
||||
if(doc != null) {
|
||||
if(fieldName.equals("_id")) {
|
||||
doc.setId(fieldValue.toString());
|
||||
} else {
|
||||
doc.addField(new Field(fieldName, fieldValue));
|
||||
}
|
||||
} else {
|
||||
final String indexName = index.getName();
|
||||
if(fieldName.equals("_id")) {
|
||||
doc = new Doc(indexName, fieldValue.toString());
|
||||
} else {
|
||||
doc = new Doc(indexName, null);
|
||||
doc.addField(new Field(fieldName, fieldValue));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void finished() {
|
||||
if(doc == null) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
Map<String, String> f = new HashMap<String, String>();
|
||||
for(Field fi: doc.fields) {
|
||||
f.put(fi.fieldName, String.valueOf(fi.fieldValue));
|
||||
}
|
||||
auditLog.logDocumentRead(doc.indexName, doc.id, shardId, f, complianceConfig);
|
||||
} catch (Exception e) {
|
||||
log.error("Unexpected error finished compliance read entry {} in index '{}': {}", doc.id, index.getName(), e.toString(), e);
|
||||
} finally {
|
||||
doc = null;
|
||||
sfc = null;
|
||||
}
|
||||
}
|
||||
|
||||
private class Doc {
|
||||
final String indexName;
|
||||
String id;
|
||||
final List<Field> fields = new ArrayList<Field>();
|
||||
|
||||
public Doc(String indexName, String id) {
|
||||
super();
|
||||
this.indexName = indexName;
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public void addField(Field f) {
|
||||
fields.add(f);
|
||||
}
|
||||
|
||||
public void setId(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Doc [indexName=" + indexName + ", id=" + id + ", fields=" + fields + "]";
|
||||
}
|
||||
}
|
||||
|
||||
private class Field {
|
||||
final String fieldName;
|
||||
final Object fieldValue;
|
||||
public Field(String fieldName, Object fieldValue) {
|
||||
super();
|
||||
this.fieldName = fieldName;
|
||||
this.fieldValue = fieldValue;
|
||||
}
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Field [fieldName=" + fieldName + ", fieldValue=" + fieldValue + "]";
|
||||
}
|
||||
}
|
||||
}
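// Illustrative only: the expected calling sequence for the callback above. Field hooks report
// values one by one, and finished() emits a single read-history entry per document via
// auditLog.logDocumentRead(...). The FieldInfo instances and raw bytes here are hypothetical.
FieldReadCallback cb = new FieldReadCallback(threadContext, indexService, clusterService,
        complianceConfig, auditLog, maskedFields, shardId); // collaborators assumed available
cb.binaryFieldRead(idFieldInfo, rawIdBytes);               // "_id" is decoded via Uid.decodeId(...)
cb.stringFieldRead(stringFieldInfo, "value".getBytes(java.nio.charset.StandardCharsets.UTF_8));
cb.numericFieldRead(numericFieldInfo, 42L);
cb.finished();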
|
File diff suppressed because it is too large
@ -0,0 +1,125 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.configuration;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.elasticsearch.ElasticsearchSecurityException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.DocWriteRequest;
|
||||
import org.elasticsearch.action.RealtimeRequest;
|
||||
import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
|
||||
import org.elasticsearch.action.bulk.BulkItemRequest;
|
||||
import org.elasticsearch.action.bulk.BulkRequest;
|
||||
import org.elasticsearch.action.bulk.BulkShardRequest;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.DlsFlsRequestValve;
|
||||
|
||||
public class DlsFlsValveImpl implements DlsFlsRequestValve {
|
||||
|
||||
/**
|
||||
*
|
||||
* @param request
|
||||
* @param listener
|
||||
* @return false on error
|
||||
*/
|
||||
public boolean invoke(final ActionRequest request, final ActionListener<?> listener,
|
||||
final Map<String,Set<String>> allowedFlsFields,
|
||||
final Map<String,Set<String>> maskedFields,
|
||||
final Map<String,Set<String>> queries) {
|
||||
|
||||
final boolean fls = allowedFlsFields != null && !allowedFlsFields.isEmpty();
|
||||
final boolean masked = maskedFields != null && !maskedFields.isEmpty();
|
||||
final boolean dls = queries != null && !queries.isEmpty();
|
||||
|
||||
if(fls || masked || dls) {
|
||||
|
||||
if(request instanceof RealtimeRequest) {
|
||||
((RealtimeRequest) request).realtime(Boolean.FALSE);
|
||||
}
|
||||
|
||||
if(request instanceof SearchRequest) {
|
||||
((SearchRequest)request).requestCache(Boolean.FALSE);
|
||||
}
|
||||
|
||||
if(request instanceof UpdateRequest) {
|
||||
listener.onFailure(new ElasticsearchSecurityException("Update is not supported when FLS or DLS or Fieldmasking is activated"));
|
||||
return false;
|
||||
}
|
||||
|
||||
if(request instanceof BulkRequest) {
|
||||
for(DocWriteRequest<?> inner:((BulkRequest) request).requests()) {
|
||||
if(inner instanceof UpdateRequest) {
|
||||
listener.onFailure(new ElasticsearchSecurityException("Update is not supported when FLS or DLS or Fieldmasking is activated"));
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(request instanceof BulkShardRequest) {
|
||||
for(BulkItemRequest inner:((BulkShardRequest) request).items()) {
|
||||
if(inner.request() instanceof UpdateRequest) {
|
||||
listener.onFailure(new ElasticsearchSecurityException("Update is not supported when FLS or DLS or Fieldmasking is activated"));
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(request instanceof ResizeRequest) {
|
||||
listener.onFailure(new ElasticsearchSecurityException("Resize is not supported when FLS or DLS or Fieldmasking is activated"));
|
||||
return false;
|
||||
}
|
||||
|
||||
/*if(request instanceof IndicesAliasesRequest) {
|
||||
final IndicesAliasesRequest aliasRequest = (IndicesAliasesRequest) request;
|
||||
aliasRequest.getAliasActions().stream().filter(a->a.actionType() == Type.ADD).forEach(a->{
|
||||
|
||||
|
||||
});
|
||||
|
||||
listener.onFailure(new ElasticsearchSecurityException("Managing aliases is not supported when FLS or DLS is activated"));
|
||||
return false;
|
||||
}*/
|
||||
}
|
||||
|
||||
if(dls) {
|
||||
if(request instanceof SearchRequest) {
|
||||
final SearchSourceBuilder source = ((SearchRequest)request).source();
|
||||
if(source != null) {
|
||||
|
||||
if(source.profile()) {
|
||||
listener.onFailure(new ElasticsearchSecurityException("Profiling is not supported when DLS is activated"));
|
||||
return false;
|
||||
}
|
||||
|
||||
//if(source.suggest() != null) {
|
||||
// listener.onFailure(new ElasticsearchSecurityException("Suggest is not supported when DLS is activated"));
|
||||
// return false;
|
||||
//}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
}
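// Illustrative only: callers are expected to abort request handling when invoke() returns
// false, because the supplied listener has already been failed. Hypothetical call site:
if (!dlsFlsValve.invoke(request, listener, allowedFlsFields, maskedFields, queries)) {
    return; // e.g. an UpdateRequest or ResizeRequest while FLS/DLS/field masking is active
}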
|
@ -0,0 +1,120 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.configuration;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.Callable;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.BooleanClause.Occur;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.ConstantScoreQuery;
|
||||
import org.apache.lucene.search.MatchAllDocsQuery;
|
||||
import org.apache.lucene.search.PrefixQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.join.BitSetProducer;
|
||||
import org.apache.lucene.search.join.ToChildBlockJoinQuery;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.index.query.AbstractQueryBuilder;
|
||||
import org.elasticsearch.index.query.ParsedQuery;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.support.OpenDistroSecurityDeprecationHandler;
|
||||
import com.google.common.cache.Cache;
|
||||
import com.google.common.cache.CacheBuilder;
|
||||
|
||||
|
||||
final class DlsQueryParser {
|
||||
|
||||
private static final Query NON_NESTED_QUERY;
|
||||
|
||||
static {
|
||||
//Match all documents but not the nested ones
|
||||
//Nested document types start with __
|
||||
//https://discuss.elastic.co/t/whats-nested-documents-layout-inside-the-lucene/59944/9
|
||||
NON_NESTED_QUERY = new BooleanQuery.Builder()
|
||||
.add(new MatchAllDocsQuery(), Occur.FILTER)
|
||||
.add(new PrefixQuery(new Term("_type", "__")), Occur.MUST_NOT)
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
private static Cache<String, QueryBuilder> queries = CacheBuilder.newBuilder().maximumSize(10000).expireAfterWrite(4, TimeUnit.HOURS)
|
||||
.build();
|
||||
|
||||
private DlsQueryParser() {
|
||||
|
||||
}
|
||||
|
||||
static Query parse(final Set<String> unparsedDlsQueries, final QueryShardContext queryShardContext,
|
||||
final NamedXContentRegistry namedXContentRegistry) throws IOException {
|
||||
|
||||
if (unparsedDlsQueries == null || unparsedDlsQueries.isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
final boolean hasNestedMapping = queryShardContext.getMapperService().hasNested();
|
||||
|
||||
BooleanQuery.Builder dlsQueryBuilder = new BooleanQuery.Builder();
|
||||
dlsQueryBuilder.setMinimumNumberShouldMatch(1);
|
||||
|
||||
for (final String unparsedDlsQuery : unparsedDlsQueries) {
|
||||
try {
|
||||
|
||||
final QueryBuilder qb = queries.get(unparsedDlsQuery, new Callable<QueryBuilder>() {
|
||||
|
||||
@Override
|
||||
public QueryBuilder call() throws Exception {
|
||||
final XContentParser parser = JsonXContent.jsonXContent.createParser(namedXContentRegistry, OpenDistroSecurityDeprecationHandler.INSTANCE, unparsedDlsQuery);
|
||||
final QueryBuilder qb = AbstractQueryBuilder.parseInnerQueryBuilder(parser);
|
||||
return qb;
|
||||
}
|
||||
|
||||
});
|
||||
final ParsedQuery parsedQuery = queryShardContext.toFilter(qb);
|
||||
final Query dlsQuery = parsedQuery.query();
|
||||
dlsQueryBuilder.add(dlsQuery, Occur.SHOULD);
|
||||
|
||||
if (hasNestedMapping) {
|
||||
handleNested(queryShardContext, dlsQueryBuilder, dlsQuery);
|
||||
}
|
||||
|
||||
} catch (ExecutionException e) {
|
||||
throw new IOException(e);
|
||||
}
|
||||
}
|
||||
|
||||
// no need for scoring here, so it's possible to wrap this in a
|
||||
// ConstantScoreQuery
|
||||
return new ConstantScoreQuery(dlsQueryBuilder.build());
|
||||
|
||||
}
|
||||
|
||||
private static void handleNested(final QueryShardContext queryShardContext,
|
||||
final BooleanQuery.Builder dlsQueryBuilder,
|
||||
final Query parentQuery) {
|
||||
final BitSetProducer parentDocumentsFilter = queryShardContext.bitsetFilter(NON_NESTED_QUERY);
|
||||
dlsQueryBuilder.add(new ToChildBlockJoinQuery(parentQuery, parentDocumentsFilter), Occur.SHOULD);
|
||||
}
|
||||
|
||||
|
||||
}
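// Illustrative only: a sketch of how the parser above turns a role's unparsed DLS query
// strings into a single Lucene filter (the searcher wrapper below is the real call site).
// The query string is hypothetical; queryShardContext and namedXContentRegistry are assumed available.
Set<String> unparsedDlsQueries = java.util.Collections.singleton("{\"term\": {\"dept\": \"hr\"}}");
Query dlsFilter = DlsQueryParser.parse(unparsedDlsQueries, queryShardContext, namedXContentRegistry);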
|
@ -0,0 +1,243 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.configuration;
|
||||
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.security.MessageDigest;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.bouncycastle.crypto.digests.Blake2bDigest;
|
||||
import org.bouncycastle.util.encoders.Hex;
|
||||
|
||||
import com.google.common.base.Splitter;
|
||||
|
||||
public class MaskedField {
|
||||
|
||||
private final String name;
|
||||
private String algo = null;
|
||||
private List<RegexReplacement> regexReplacements;
|
||||
private final byte[] defaultSalt;
|
||||
|
||||
public MaskedField(final String value, final byte[] defaultSalt) {
|
||||
this.defaultSalt = defaultSalt;
|
||||
final List<String> tokens = Splitter.on("::").splitToList(Objects.requireNonNull(value));
|
||||
final int tokenCount = tokens.size();
|
||||
if (tokenCount == 1) {
|
||||
name = tokens.get(0);
|
||||
} else if (tokenCount == 2) {
|
||||
name = tokens.get(0);
|
||||
algo = tokens.get(1);
|
||||
} else if (tokenCount >= 3 && tokenCount%2==1) {
|
||||
name = tokens.get(0);
|
||||
regexReplacements = new ArrayList<>((tokenCount-1)/2);
|
||||
for(int i=1; i<tokenCount-1; i=i+2) {
|
||||
regexReplacements.add(new RegexReplacement(tokens.get(i), tokens.get(i+1)));
|
||||
}
|
||||
} else {
|
||||
throw new IllegalArgumentException("Expected 1 or 2 or >=3 (but then odd count) tokens, got " + tokenCount);
|
||||
}
|
||||
}
|
||||
|
||||
public final void isValid() throws Exception {
|
||||
mask(new byte[] {1,2,3,4,5});
|
||||
}
|
||||
|
||||
public byte[] mask(byte[] value) {
|
||||
if (isDefault()) {
|
||||
return blake2bHash(value);
|
||||
} else {
|
||||
return customHash(value);
|
||||
}
|
||||
}
|
||||
|
||||
public String mask(String value) {
|
||||
if (isDefault()) {
|
||||
return blake2bHash(value);
|
||||
} else {
|
||||
return customHash(value);
|
||||
}
|
||||
}
|
||||
|
||||
public BytesRef mask(BytesRef value) {
|
||||
if (isDefault()) {
|
||||
return blake2bHash(value);
|
||||
} else {
|
||||
return customHash(value);
|
||||
}
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + ((algo == null) ? 0 : algo.hashCode());
|
||||
result = prime * result + ((name == null) ? 0 : name.hashCode());
|
||||
result = prime * result + ((regexReplacements == null) ? 0 : regexReplacements.hashCode());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj)
|
||||
return true;
|
||||
if (obj == null)
|
||||
return false;
|
||||
if (getClass() != obj.getClass())
|
||||
return false;
|
||||
MaskedField other = (MaskedField) obj;
|
||||
if (algo == null) {
|
||||
if (other.algo != null)
|
||||
return false;
|
||||
} else if (!algo.equals(other.algo))
|
||||
return false;
|
||||
if (name == null) {
|
||||
if (other.name != null)
|
||||
return false;
|
||||
} else if (!name.equals(other.name))
|
||||
return false;
|
||||
if (regexReplacements == null) {
|
||||
if (other.regexReplacements != null)
|
||||
return false;
|
||||
} else if (!regexReplacements.equals(other.regexReplacements))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "MaskedField [name=" + name + ", algo=" + algo + ", regexReplacements=" + regexReplacements
|
||||
+ ", defaultSalt=" + Arrays.toString(defaultSalt) + ", isDefault()=" + isDefault() + "]";
|
||||
}
|
||||
|
||||
private boolean isDefault() {
|
||||
return regexReplacements == null && algo == null;
|
||||
}
|
||||
|
||||
private byte[] customHash(byte[] in) {
|
||||
if (algo != null) {
|
||||
try {
|
||||
MessageDigest digest = MessageDigest.getInstance(algo);
|
||||
return Hex.encode(digest.digest(in));
|
||||
} catch (NoSuchAlgorithmException e) {
|
||||
throw new IllegalArgumentException(e);
|
||||
}
|
||||
} else if (regexReplacements != null) {
|
||||
String cur = new String(in, StandardCharsets.UTF_8);
|
||||
for(RegexReplacement rr: regexReplacements) {
|
||||
cur = cur.replaceAll(rr.getRegex(), rr.getReplacement());
|
||||
}
|
||||
return cur.getBytes(StandardCharsets.UTF_8);
|
||||
|
||||
} else {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
}
|
||||
|
||||
private BytesRef customHash(BytesRef in) {
|
||||
final BytesRef copy = BytesRef.deepCopyOf(in);
|
||||
return new BytesRef(customHash(copy.bytes));
|
||||
}
|
||||
|
||||
private String customHash(String in) {
|
||||
return new String(customHash(in.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8);
|
||||
}
|
||||
|
||||
private byte[] blake2bHash(byte[] in) {
|
||||
final Blake2bDigest hash = new Blake2bDigest(null, 32, null, defaultSalt);
|
||||
hash.update(in, 0, in.length);
|
||||
final byte[] out = new byte[hash.getDigestSize()];
|
||||
hash.doFinal(out, 0);
|
||||
return Hex.encode(out);
|
||||
}
|
||||
|
||||
private BytesRef blake2bHash(BytesRef in) {
|
||||
final BytesRef copy = BytesRef.deepCopyOf(in);
|
||||
return new BytesRef(blake2bHash(copy.bytes));
|
||||
}
|
||||
|
||||
private String blake2bHash(String in) {
|
||||
return new String(blake2bHash(in.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8);
|
||||
}
|
||||
|
||||
private static class RegexReplacement {
|
||||
private final String regex;
|
||||
private final String replacement;
|
||||
|
||||
public RegexReplacement(String regex, String replacement) {
|
||||
super();
|
||||
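// strip the first and last character of the regex token (its enclosing delimiters, e.g. '/.../')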
this.regex = regex.substring(1).substring(0, regex.length()-2);
|
||||
this.replacement = replacement;
|
||||
}
|
||||
|
||||
public String getRegex() {
|
||||
return regex;
|
||||
}
|
||||
|
||||
public String getReplacement() {
|
||||
return replacement;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
final int prime = 31;
|
||||
int result = 1;
|
||||
result = prime * result + ((regex == null) ? 0 : regex.hashCode());
|
||||
result = prime * result + ((replacement == null) ? 0 : replacement.hashCode());
|
||||
return result;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (this == obj)
|
||||
return true;
|
||||
if (obj == null)
|
||||
return false;
|
||||
if (getClass() != obj.getClass())
|
||||
return false;
|
||||
RegexReplacement other = (RegexReplacement) obj;
|
||||
if (regex == null) {
|
||||
if (other.regex != null)
|
||||
return false;
|
||||
} else if (!regex.equals(other.regex))
|
||||
return false;
|
||||
if (replacement == null) {
|
||||
if (other.replacement != null)
|
||||
return false;
|
||||
} else if (!replacement.equals(other.replacement))
|
||||
return false;
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "RegexReplacement [regex=" + regex + ", replacement=" + replacement + "]";
|
||||
}
|
||||
|
||||
}
|
||||
}
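// Illustrative only: the three value shapes accepted by the MaskedField constructor above
// ("::"-separated). For the regex form, the first and last character of each regex token are
// stripped; enclosing '/' delimiters are assumed here. "salt" is a hypothetical default salt byte[].
MaskedField hashed   = new MaskedField("ssn", salt);            // default: Blake2b hash with the salt
MaskedField digested = new MaskedField("ssn::SHA-256", salt);   // any MessageDigest algorithm name
MaskedField redacted = new MaskedField("phone::/[0-9]/::*", salt); // regex/replacement pairs, repeatable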
|
@ -0,0 +1,129 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.configuration;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.join.BitSetProducer;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.IndexService;
|
||||
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
|
||||
import org.elasticsearch.index.engine.EngineException;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.index.shard.ShardUtils;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.compliance.ComplianceConfig;
|
||||
import com.amazon.opendistroforelasticsearch.security.compliance.ComplianceIndexingOperationListener;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.AdminDNs;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.EmptyFilterLeafReader;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.OpenDistroSecurityIndexSearcherWrapper;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.HeaderHelper;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.OpenDistroSecurityUtils;
|
||||
import com.google.common.collect.Sets;
|
||||
|
||||
public class OpenDistroSecurityFlsDlsIndexSearcherWrapper extends OpenDistroSecurityIndexSearcherWrapper {
|
||||
|
||||
private static final Set<String> metaFields = Sets.union(Sets.newHashSet("_source", "_version"),
|
||||
Sets.newHashSet(MapperService.getAllMetaFields()));
|
||||
private final ClusterService clusterService;
|
||||
private final IndexService indexService;
|
||||
private final ComplianceConfig complianceConfig;
|
||||
private final AuditLog auditlog;
|
||||
|
||||
public OpenDistroSecurityFlsDlsIndexSearcherWrapper(final IndexService indexService, final Settings settings,
|
||||
final AdminDNs adminDNs, final ClusterService clusterService, final AuditLog auditlog,
|
||||
final ComplianceIndexingOperationListener ciol, final ComplianceConfig complianceConfig) {
|
||||
super(indexService, settings, adminDNs);
|
||||
ciol.setIs(indexService);
|
||||
this.clusterService = clusterService;
|
||||
this.indexService = indexService;
|
||||
this.complianceConfig = complianceConfig;
|
||||
this.auditlog = auditlog;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
@Override
|
||||
protected DirectoryReader dlsFlsWrap(final DirectoryReader reader, boolean isAdmin) throws IOException {
|
||||
|
||||
final ShardId shardId = ShardUtils.extractShardId(reader);
|
||||
|
||||
Set<String> flsFields = null;
|
||||
Set<String> maskedFields = null;
|
||||
BitSetProducer bsp = null;
|
||||
|
||||
if(!isAdmin) {
|
||||
|
||||
final Map<String, Set<String>> allowedFlsFields = (Map<String, Set<String>>) HeaderHelper.deserializeSafeFromHeader(threadContext,
|
||||
ConfigConstants.OPENDISTRO_SECURITY_FLS_FIELDS_HEADER);
|
||||
final Map<String, Set<String>> queries = (Map<String, Set<String>>) HeaderHelper.deserializeSafeFromHeader(threadContext,
|
||||
ConfigConstants.OPENDISTRO_SECURITY_DLS_QUERY_HEADER);
|
||||
final Map<String, Set<String>> maskedFieldsMap = (Map<String, Set<String>>) HeaderHelper.deserializeSafeFromHeader(threadContext,
|
||||
ConfigConstants.OPENDISTRO_SECURITY_MASKED_FIELD_HEADER);
|
||||
|
||||
final String flsEval = OpenDistroSecurityUtils.evalMap(allowedFlsFields, index.getName());
|
||||
final String dlsEval = OpenDistroSecurityUtils.evalMap(queries, index.getName());
|
||||
final String maskedEval = OpenDistroSecurityUtils.evalMap(maskedFieldsMap, index.getName());
|
||||
|
||||
if (flsEval != null) {
|
||||
flsFields = new HashSet<>(metaFields);
|
||||
flsFields.addAll(allowedFlsFields.get(flsEval));
|
||||
}
|
||||
|
||||
|
||||
|
||||
if (dlsEval != null) {
|
||||
final Set<String> unparsedDlsQueries = queries.get(dlsEval);
|
||||
if(unparsedDlsQueries != null && !unparsedDlsQueries.isEmpty()) {
|
||||
final BitsetFilterCache bsfc = this.indexService.cache().bitsetFilterCache();
|
||||
//disable reader optimizations
|
||||
final Query dlsQuery = DlsQueryParser.parse(unparsedDlsQueries, this.indexService.newQueryShardContext(shardId.getId(), null, null, null)
|
||||
, this.indexService.xContentRegistry());
|
||||
bsp = dlsQuery==null?null:bsfc.getBitSetProducer(dlsQuery);
|
||||
}
|
||||
}
|
||||
|
||||
if (maskedEval != null) {
|
||||
maskedFields = new HashSet<>();
|
||||
maskedFields.addAll(maskedFieldsMap.get(maskedEval));
|
||||
}
|
||||
}
|
||||
|
||||
return new DlsFlsFilterLeafReader.DlsFlsDirectoryReader(reader, flsFields, bsp,
|
||||
indexService, threadContext, clusterService, complianceConfig, auditlog, maskedFields, shardId);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
protected IndexSearcher dlsFlsWrap(final IndexSearcher searcher, boolean isAdmin) throws EngineException {
|
||||
|
||||
if(searcher.getIndexReader().getClass() != DlsFlsFilterLeafReader.DlsFlsDirectoryReader.class
|
||||
&& searcher.getIndexReader().getClass() != EmptyFilterLeafReader.EmptyDirectoryReader.class) {
|
||||
throw new RuntimeException("Unexpected index reader class "+searcher.getIndexReader().getClass());
|
||||
}
|
||||
|
||||
return searcher;
|
||||
}
|
||||
}
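
To make the wrapping logic above easier to follow, here is a small sketch of the data shape it reads from the thread-context headers: a map from an index pattern (or similar key) to the set of allowed fields, DLS query strings, or masked-field expressions. The concrete keys, the field names and the stand-in for OpenDistroSecurityUtils.evalMap are assumptions for illustration only.

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class FlsHeaderShapeSketch {

    public static void main(String[] args) {
        // assumed shape of the OPENDISTRO_SECURITY_FLS_FIELDS_HEADER payload
        Map<String, Set<String>> allowedFlsFields = Map.of(
                "logs-*", Set.of("message", "timestamp"),
                "hr", Set.of("first_name", "last_name"));

        String indexName = "hr";

        // crude stand-in for OpenDistroSecurityUtils.evalMap(allowedFlsFields, indexName)
        String flsEval = allowedFlsFields.containsKey(indexName) ? indexName : null;

        if (flsEval != null) {
            // like dlsFlsWrap: always keep the meta fields, then add the explicitly allowed ones
            Set<String> flsFields = new HashSet<>(Set.of("_source", "_version"));
            flsFields.addAll(allowedFlsFields.get(flsEval));
            System.out.println(flsFields);
        }
    }
}
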
|
@ -0,0 +1,302 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.configuration;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.ActionRequest;
|
||||
import org.elasticsearch.action.DocWriteRequest;
|
||||
import org.elasticsearch.action.IndicesRequest.Replaceable;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest;
|
||||
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
|
||||
import org.elasticsearch.action.bulk.BulkRequest;
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.action.get.MultiGetRequest;
|
||||
import org.elasticsearch.action.get.MultiGetRequest.Item;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.search.MultiSearchRequest;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.support.replication.ReplicationRequest;
|
||||
import org.elasticsearch.action.support.single.shard.SingleShardRequest;
|
||||
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
|
||||
import org.elasticsearch.action.termvectors.TermVectorsRequest;
|
||||
import org.elasticsearch.action.update.UpdateRequest;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.privileges.PrivilegesInterceptor;
|
||||
import com.amazon.opendistroforelasticsearch.security.resolver.IndexResolverReplacer.Resolved;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.User;
|
||||
|
||||
public class PrivilegesInterceptorImpl extends PrivilegesInterceptor {
|
||||
|
||||
private static final String USER_TENANT = "__user__";
|
||||
private static final String EMPTY_STRING = "";
|
||||
|
||||
protected final Logger log = LogManager.getLogger(this.getClass());
|
||||
|
||||
public PrivilegesInterceptorImpl(IndexNameExpressionResolver resolver, ClusterService clusterService, Client client,
|
||||
ThreadPool threadPool) {
|
||||
super(resolver, clusterService, client, threadPool);
|
||||
}
|
||||
|
||||
private boolean isTenantAllowed(final ActionRequest request, final String action, final User user, final Map<String, Boolean> tenants, final String requestedTenant) {
|
||||
|
||||
if (!tenants.containsKey(requestedTenant)) {
|
||||
log.warn("Tenant {} is not allowed for user {}", requestedTenant, user.getName());
|
||||
return false;
|
||||
} else {
|
||||
|
||||
if(log.isDebugEnabled()) {
|
||||
log.debug("request "+request.getClass());
|
||||
}
|
||||
|
||||
if (request instanceof IndexRequest) {
|
||||
|
||||
final IndexRequest ir = ((IndexRequest) request);
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("type " + ir.type());
|
||||
log.debug("id " + ir.id());
|
||||
log.debug("source " + (ir.source() == null ? null : ir.source().utf8ToString()));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if (request instanceof UpdateRequest) {
|
||||
|
||||
final UpdateRequest ir = ((UpdateRequest) request);
|
||||
|
||||
if (log.isDebugEnabled()) {
|
||||
log.debug("type " + ir.type());
|
||||
log.debug("id " + ir.id());
|
||||
log.debug("source " + (ir.doc() == null ? null : ir.doc().source()==null?null:ir.doc().source().utf8ToString()));
|
||||
}
|
||||
}
|
||||
|
||||
if (tenants.get(requestedTenant) == Boolean.FALSE
|
||||
&& action.startsWith("indices:data/write")) {
|
||||
log.warn("Tenant {} is not allowed to write (user: {})", requestedTenant, user.getName());
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
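    /*
     * Illustrative sketch (assumption, not taken from this file): the tenants map handed to
     * isTenantAllowed is assumed to map tenant name -> write permission, so a read-only tenant
     * can still be selected while any "indices:data/write..." action on it is rejected above.
     *
     *   Map<String, Boolean> tenants = new HashMap<>();
     *   tenants.put("admin_tenant", Boolean.TRUE);   // read/write
     *   tenants.put("audit_viewer", Boolean.FALSE);  // read-only
     *
     *   // isTenantAllowed(req, "indices:data/write/index", user, tenants, "audit_viewer") -> false
     *   // isTenantAllowed(req, "indices:data/read/search", user, tenants, "audit_viewer") -> true
     */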
|
||||
|
||||
/**
|
||||
* return Boolean.TRUE to prematurely deny request
|
||||
* return Boolean.FALSE to prematurely allow request
|
||||
* return null to go through original eval flow
|
||||
*
|
||||
*/
|
||||
@Override
|
||||
public Boolean replaceKibanaIndex(final ActionRequest request, final String action, final User user, final Settings config, final Resolved requestedResolved, final Map<String, Boolean> tenants) {
|
||||
|
||||
final boolean enabled = config.getAsBoolean("opendistro_security.dynamic.kibana.multitenancy_enabled", true);
|
||||
|
||||
if(!enabled) {
|
||||
return null;
|
||||
}
|
||||
|
||||
//the next two values need to be retrieved from the configuration
|
||||
final String kibanaserverUsername = config.get("opendistro_security.dynamic.kibana.server_username","kibanaserver");
|
||||
final String kibanaIndexName = config.get("opendistro_security.dynamic.kibana.index",".kibana");
|
||||
|
||||
String requestedTenant = user.getRequestedTenant();
|
||||
|
||||
if(log.isDebugEnabled()) {
|
||||
log.debug("raw requestedTenant: '"+requestedTenant+"'");
|
||||
}
|
||||
|
||||
if(requestedTenant == null || requestedTenant.length() == 0) {
|
||||
if(log.isTraceEnabled()) {
|
||||
log.trace("No tenant, will resolve to "+kibanaIndexName);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
if(USER_TENANT.equals(requestedTenant)) {
|
||||
requestedTenant = user.getName();
|
||||
}
|
||||
|
||||
if (log.isDebugEnabled() && !user.getName().equals(kibanaserverUsername)) {
|
||||
//log statements only here
|
||||
log.debug("requestedResolved: "+requestedResolved);
|
||||
}
|
||||
|
||||
if (!user.getName().equals(kibanaserverUsername)
|
||||
&& requestedResolved.getAllIndices().size() == 1
|
||||
&& requestedResolved.getAllIndices().contains(toUserIndexName(kibanaIndexName, requestedTenant))) {
|
||||
|
||||
if(isTenantAllowed(request, action, user, tenants, requestedTenant)) {
|
||||
return Boolean.FALSE;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
//intercept when requests are not made by the kibana server and if the kibana index/alias (.kibana) is the only index/alias involved
|
||||
if (!user.getName().equals(kibanaserverUsername)
|
||||
&& resolveToKibanaIndexOrAlias(requestedResolved, kibanaIndexName)) {
|
||||
|
||||
if(log.isDebugEnabled()) {
|
||||
log.debug("requestedTenant: "+requestedTenant);
|
||||
log.debug("is user tenant: "+requestedTenant.equals(user.getName()));
|
||||
}
|
||||
|
||||
if(!isTenantAllowed(request, action, user, tenants, requestedTenant)) {
|
||||
return Boolean.TRUE;
|
||||
}
|
||||
|
||||
//TODO handle the user tenant in such a way that it cannot also be specified as a regular tenant,
//to avoid a security issue
|
||||
|
||||
replaceIndex(request, kibanaIndexName, toUserIndexName(kibanaIndexName, requestedTenant), action);
|
||||
return Boolean.FALSE;
|
||||
|
||||
} else if (!user.getName().equals(kibanaserverUsername)) {
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("not a request to only the .kibana index");
|
||||
log.trace(user.getName() + "/" + kibanaserverUsername);
|
||||
log.trace(requestedResolved + " does not contain only " + kibanaIndexName);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
private void replaceIndex(final ActionRequest request, final String oldIndexName, final String newIndexName, final String action) {
|
||||
boolean kibOk = false;
|
||||
|
||||
if(log.isDebugEnabled()) {
|
||||
log.debug("{} index will be replaced with {} in this {} request", oldIndexName, newIndexName, request.getClass().getName());
|
||||
}
|
||||
|
||||
if(request instanceof GetFieldMappingsIndexRequest
|
||||
|| request instanceof GetFieldMappingsRequest) {
|
||||
return;
|
||||
}
|
||||
|
||||
//handle msearch and mget:
//in case of a GET, change the .kibana index to the user's kibana index
//in case of a search, add the user's kibana index
|
||||
String[] newIndexNames = new String[] { newIndexName };
|
||||
|
||||
|
||||
// CreateIndexRequest
|
||||
if (request instanceof CreateIndexRequest) {
|
||||
((CreateIndexRequest) request).index(newIndexName);
|
||||
kibOk = true;
|
||||
} else if (request instanceof BulkRequest) {
|
||||
|
||||
for (DocWriteRequest<?> ar : ((BulkRequest) request).requests()) {
|
||||
|
||||
if(ar instanceof DeleteRequest) {
|
||||
((DeleteRequest) ar).index(newIndexName);
|
||||
}
|
||||
|
||||
if(ar instanceof IndexRequest) {
|
||||
((IndexRequest) ar).index(newIndexName);
|
||||
}
|
||||
|
||||
if(ar instanceof UpdateRequest) {
|
||||
((UpdateRequest) ar).index(newIndexName);
|
||||
}
|
||||
}
|
||||
|
||||
kibOk = true;
|
||||
|
||||
} else if (request instanceof MultiGetRequest) {
|
||||
|
||||
for (Item item : ((MultiGetRequest) request).getItems()) {
|
||||
item.index(newIndexName);
|
||||
}
|
||||
|
||||
kibOk = true;
|
||||
|
||||
} else if (request instanceof MultiSearchRequest) {
|
||||
|
||||
for (SearchRequest ar : ((MultiSearchRequest) request).requests()) {
|
||||
ar.indices(newIndexNames);
|
||||
}
|
||||
|
||||
kibOk = true;
|
||||
|
||||
} else if (request instanceof MultiTermVectorsRequest) {
|
||||
|
||||
for (TermVectorsRequest ar : (Iterable<TermVectorsRequest>) () -> ((MultiTermVectorsRequest) request).iterator()) {
|
||||
ar.index(newIndexName);
|
||||
}
|
||||
|
||||
kibOk = true;
|
||||
} else if (request instanceof UpdateRequest) {
|
||||
((UpdateRequest) request).index(newIndexName);
|
||||
kibOk = true;
|
||||
} else if (request instanceof IndexRequest) {
|
||||
((IndexRequest) request).index(newIndexName);
|
||||
kibOk = true;
|
||||
} else if (request instanceof DeleteRequest) {
|
||||
((DeleteRequest) request).index(newIndexName);
|
||||
kibOk = true;
|
||||
} else if (request instanceof SingleShardRequest) {
|
||||
((SingleShardRequest<?>) request).index(newIndexName);
|
||||
kibOk = true;
|
||||
} else if (request instanceof RefreshRequest) {
|
||||
((RefreshRequest) request).indices(newIndexNames); // unclear whether refresh requests need any further handling
|
||||
kibOk = true;
|
||||
} else if (request instanceof ReplicationRequest) {
|
||||
((ReplicationRequest<?>) request).index(newIndexName);
|
||||
kibOk = true;
|
||||
} else if (request instanceof Replaceable) {
|
||||
Replaceable replaceableRequest = (Replaceable) request;
|
||||
replaceableRequest.indices(newIndexNames);
|
||||
kibOk = true;
|
||||
} else {
|
||||
log.warn("Don't know what to do (1) with {}", request.getClass());
|
||||
}
|
||||
|
||||
if (!kibOk) {
|
||||
log.warn("Don't know what to do (2) with {}", request.getClass());
|
||||
}
|
||||
}
|
||||
|
||||
private String toUserIndexName(final String originalKibanaIndex, final String tenant) {
|
||||
|
||||
if(tenant == null) {
|
||||
throw new ElasticsearchException("tenant must not be null here");
|
||||
}
|
||||
|
||||
return originalKibanaIndex+"_"+tenant.hashCode()+"_"+tenant.toLowerCase().replaceAll("[^a-z0-9]+",EMPTY_STRING);
|
||||
}
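    /*
     * Illustrative sketch (not taken from this file): how a tenant name is turned into the
     * per-tenant Kibana index by toUserIndexName above. The sample tenant is an assumption.
     *
     *   String tenant = "Human Resources";
     *   String index = ".kibana" + "_" + tenant.hashCode() + "_"
     *           + tenant.toLowerCase().replaceAll("[^a-z0-9]+", "");
     *   // e.g. ".kibana_<hashCode>_humanresources" - the numeric part comes from String.hashCode()
     */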
|
||||
|
||||
private boolean resolveToKibanaIndexOrAlias(final Resolved requestedResolved, final String kibanaIndexName) {
|
||||
return (requestedResolved.getAllIndices().size() == 1 && requestedResolved.getAllIndices().contains(kibanaIndexName))
|
||||
|| (requestedResolved.getAliases().size() == 1 && requestedResolved.getAliases().contains(kibanaIndexName));
|
||||
}
|
||||
}
|
@ -0,0 +1,540 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.api;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.Semaphore;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.index.IndexResponse;
|
||||
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.client.node.NodeClient;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.Settings.Builder;
|
||||
import org.elasticsearch.common.util.concurrent.ThreadContext.StoredContext;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.rest.BaseRestHandler;
|
||||
import org.elasticsearch.rest.BytesRestResponse;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
import org.elasticsearch.rest.RestResponse;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.action.configupdate.ConfigUpdateAction;
|
||||
import com.amazon.opendistroforelasticsearch.security.action.configupdate.ConfigUpdateNodeResponse;
|
||||
import com.amazon.opendistroforelasticsearch.security.action.configupdate.ConfigUpdateRequest;
|
||||
import com.amazon.opendistroforelasticsearch.security.action.configupdate.ConfigUpdateResponse;
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.AdminDNs;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.IndexBaseConfigurationRepository;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.support.Utils;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.AbstractConfigurationValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.AbstractConfigurationValidator.ErrorType;
|
||||
import com.amazon.opendistroforelasticsearch.security.privileges.PrivilegesEvaluator;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.transport.PrincipalExtractor;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.User;
|
||||
|
||||
public abstract class AbstractApiAction extends BaseRestHandler {
|
||||
|
||||
protected final Logger log = LogManager.getLogger(this.getClass());
|
||||
|
||||
protected final IndexBaseConfigurationRepository cl;
|
||||
protected final ClusterService cs;
|
||||
final ThreadPool threadPool;
|
||||
private String opendistrosecurityIndex;
|
||||
private final RestApiPrivilegesEvaluator restApiPrivilegesEvaluator;
|
||||
protected final AuditLog auditLog;
|
||||
|
||||
protected AbstractApiAction(final Settings settings, final Path configPath, final RestController controller,
|
||||
final Client client, final AdminDNs adminDNs, final IndexBaseConfigurationRepository cl,
|
||||
final ClusterService cs, final PrincipalExtractor principalExtractor, final PrivilegesEvaluator evaluator,
|
||||
ThreadPool threadPool, AuditLog auditLog) {
|
||||
super(settings);
|
||||
this.opendistrosecurityIndex = settings.get(ConfigConstants.OPENDISTRO_SECURITY_CONFIG_INDEX_NAME,
|
||||
ConfigConstants.OPENDISTRO_SECURITY_DEFAULT_CONFIG_INDEX);
|
||||
|
||||
this.cl = cl;
|
||||
this.cs = cs;
|
||||
this.threadPool = threadPool;
|
||||
this.restApiPrivilegesEvaluator = new RestApiPrivilegesEvaluator(settings, adminDNs, evaluator,
|
||||
principalExtractor, configPath, threadPool);
|
||||
this.auditLog = auditLog;
|
||||
}
|
||||
|
||||
protected abstract AbstractConfigurationValidator getValidator(RestRequest request, BytesReference ref, Object... params);
|
||||
|
||||
protected abstract String getResourceName();
|
||||
|
||||
protected abstract String getConfigName();
|
||||
|
||||
protected Tuple<String[], RestResponse> handleApiRequest(final RestRequest request, final Client client)
|
||||
throws Throwable {
|
||||
|
||||
// validate additional settings, if any
|
||||
AbstractConfigurationValidator validator = getValidator(request, request.content());
|
||||
if (!validator.validateSettings()) {
|
||||
request.params().clear();
|
||||
return new Tuple<String[], RestResponse>(new String[0],
|
||||
new BytesRestResponse(RestStatus.BAD_REQUEST, validator.errorsAsXContent()));
|
||||
}
|
||||
switch (request.method()) {
|
||||
case DELETE:
|
||||
return handleDelete(request, client, validator.settingsBuilder());
|
||||
case POST:
|
||||
return handlePost(request, client, validator.settingsBuilder());
|
||||
case PUT:
|
||||
return handlePut(request, client, validator.settingsBuilder());
|
||||
case GET:
|
||||
return handleGet(request, client, validator.settingsBuilder());
|
||||
default:
|
||||
throw new IllegalArgumentException(request.method() + " not supported");
|
||||
}
|
||||
}
|
||||
|
||||
protected Tuple<String[], RestResponse> handleDelete(final RestRequest request, final Client client,
|
||||
final Settings.Builder additionalSettingsBuilder) throws Throwable {
|
||||
final String name = request.param("name");
|
||||
|
||||
if (name == null || name.length() == 0) {
|
||||
return badRequestResponse("No " + getResourceName() + " specified");
|
||||
}
|
||||
|
||||
final Settings existingAsSettings = loadAsSettings(getConfigName(), false);
|
||||
|
||||
if (isHidden(existingAsSettings, name)) {
|
||||
return notFound(getResourceName() + " " + name + " not found.");
|
||||
}
|
||||
|
||||
if (isReadOnly(existingAsSettings, name)) {
|
||||
return forbidden("Resource '"+ name +"' is read-only.");
|
||||
}
|
||||
|
||||
final Map<String, Object> config = Utils.convertJsonToxToStructuredMap(Settings.builder().put(existingAsSettings).build());
|
||||
|
||||
boolean resourceExisted = config.containsKey(name);
|
||||
config.remove(name);
|
||||
if (resourceExisted) {
|
||||
save(client, request, getConfigName(), Utils.convertStructuredMapToBytes(config));
|
||||
return successResponse("'" + name + "' deleted.", getConfigName());
|
||||
} else {
|
||||
return notFound(getResourceName() + " " + name + " not found.");
|
||||
}
|
||||
}
|
||||
|
||||
protected Tuple<String[], RestResponse> handlePut(final RestRequest request, final Client client,
|
||||
final Settings.Builder additionalSettingsBuilder) throws Throwable {
|
||||
|
||||
final String name = request.param("name");
|
||||
|
||||
if (name == null || name.length() == 0) {
|
||||
return badRequestResponse("No " + getResourceName() + " specified");
|
||||
}
|
||||
|
||||
final Settings existingAsSettings = loadAsSettings(getConfigName(), false);
|
||||
|
||||
if (isHidden(existingAsSettings, name)) {
|
||||
return forbidden("Resource '"+ name +"' is not available.");
|
||||
}
|
||||
|
||||
if (isReadOnly(existingAsSettings, name)) {
|
||||
return forbidden("Resource '"+ name +"' is read-only.");
|
||||
}
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace(additionalSettingsBuilder.build());
|
||||
}
|
||||
|
||||
final Map<String, Object> con = Utils.convertJsonToxToStructuredMap(existingAsSettings);
|
||||
|
||||
boolean existed = con.containsKey(name);
|
||||
|
||||
con.put(name, Utils.convertJsonToxToStructuredMap(additionalSettingsBuilder.build()));
|
||||
|
||||
save(client, request, getConfigName(), Utils.convertStructuredMapToBytes(con));
|
||||
if (existed) {
|
||||
return successResponse("'" + name + "' updated.", getConfigName());
|
||||
} else {
|
||||
return createdResponse("'" + name + "' created.", getConfigName());
|
||||
}
|
||||
}
|
||||
|
||||
protected Tuple<String[], RestResponse> handlePost(final RestRequest request, final Client client,
|
||||
final Settings.Builder additionalSettings) throws Throwable {
|
||||
return notImplemented(Method.POST);
|
||||
}
|
||||
|
||||
protected Tuple<String[], RestResponse> handleGet(RestRequest request, Client client, Builder additionalSettings)
|
||||
throws Throwable {
|
||||
|
||||
final String resourcename = request.param("name");
|
||||
|
||||
final Settings.Builder settingsBuilder = load(getConfigName(), true);
|
||||
|
||||
// filter hidden resources and sensitive settings
|
||||
filter(settingsBuilder);
|
||||
|
||||
final Settings configurationSettings = settingsBuilder.build();
|
||||
|
||||
// no specific resource requested, return complete config
|
||||
if (resourcename == null || resourcename.length() == 0) {
|
||||
return new Tuple<String[], RestResponse>(new String[0],
|
||||
new BytesRestResponse(RestStatus.OK, convertToJson(configurationSettings)));
|
||||
}
|
||||
|
||||
|
||||
|
||||
final Map<String, Object> con =
|
||||
new HashMap<>(Utils.convertJsonToxToStructuredMap(Settings.builder().put(configurationSettings).build()))
|
||||
.entrySet()
|
||||
.stream()
|
||||
.filter(f -> f.getKey() != null && f.getKey().equals(resourcename)) // keep only the requested resource
|
||||
.collect(Collectors.toMap(p -> p.getKey(), p -> p.getValue()));
|
||||
|
||||
if (!con.containsKey(resourcename)) {
|
||||
return notFound("Resource '" + resourcename + "' not found.");
|
||||
}
|
||||
return new Tuple<String[], RestResponse>(new String[0],
|
||||
new BytesRestResponse(RestStatus.OK, XContentHelper.convertToJson(Utils.convertStructuredMapToBytes(con), false, false, XContentType.JSON)));
|
||||
}
|
||||
|
||||
protected final Settings.Builder load(final String config, boolean triggerComplianceWhenCached) {
|
||||
return Settings.builder().put(loadAsSettings(config, triggerComplianceWhenCached));
|
||||
}
|
||||
|
||||
protected final Settings loadAsSettings(final String config, boolean triggerComplianceWhenCached) {
|
||||
return cl.getConfiguration(config, triggerComplianceWhenCached);
|
||||
}
|
||||
|
||||
protected boolean ensureIndexExists(final Client client) {
|
||||
if (!cs.state().metaData().hasConcreteIndex(this.opendistrosecurityIndex)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
protected void filter(Settings.Builder builder) {
|
||||
Settings settings = builder.build();
|
||||
|
||||
for (Map.Entry<String, Settings> entry : settings.getAsGroups(true).entrySet()) {
|
||||
if (entry.getValue().getAsBoolean("hidden", false)) {
|
||||
for (String subKey : entry.getValue().keySet()) {
|
||||
builder.remove(entry.getKey() + "." + subKey);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
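    /*
     * Illustrative sketch (assumption, not taken from this file): given a flattened settings
     * group like
     *
     *   some_role.cluster.0   = CLUSTER_ALL
     *   hidden_role.hidden    = true
     *   hidden_role.cluster.0 = CLUSTER_ALL
     *
     * filter() above removes every "hidden_role.*" key, so only the "some_role.*" keys survive
     * in the builder that is later rendered into the GET response.
     */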
|
||||
|
||||
protected void save(final Client client, final RestRequest request, final String config,
|
||||
final Settings.Builder settings) throws Throwable {
|
||||
save(client, request, config, toSource(settings));
|
||||
}
|
||||
|
||||
protected void save(final Client client, final RestRequest request, final String config,
|
||||
final BytesReference bytesRef) throws Throwable {
|
||||
final Semaphore sem = new Semaphore(0);
|
||||
final List<Throwable> exception = new ArrayList<Throwable>(1);
|
||||
final IndexRequest ir = new IndexRequest(this.opendistrosecurityIndex);
|
||||
|
||||
String type = "security";
|
||||
String id = config;
|
||||
|
||||
if (cs.state().metaData().index(this.opendistrosecurityIndex).mapping("config") != null) {
|
||||
type = config;
|
||||
id = "0";
|
||||
}
|
||||
|
||||
client.index(ir.type(type).id(id).setRefreshPolicy(RefreshPolicy.IMMEDIATE).source(config, bytesRef),
|
||||
new ActionListener<IndexResponse>() {
|
||||
|
||||
@Override
|
||||
public void onResponse(final IndexResponse response) {
|
||||
sem.release();
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("{} successfully updated", config);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(final Exception e) {
|
||||
sem.release();
|
||||
exception.add(e);
|
||||
logger.error("Cannot update {} due to", config, e);
|
||||
}
|
||||
});
|
||||
|
||||
if (!sem.tryAcquire(2, TimeUnit.MINUTES)) {
|
||||
// timeout
|
||||
logger.error("Cannot update {} due to timeout", config);
|
||||
throw new ElasticsearchException("Timeout updating " + config);
|
||||
}
|
||||
|
||||
if (exception.size() > 0) {
|
||||
throw exception.get(0);
|
||||
}
|
||||
|
||||
}
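    /*
     * Minimal sketch (not part of this file) of the blocking pattern used by save() above:
     * the asynchronous client call signals a Semaphore from its listener and the caller
     * waits on it with a timeout. "asyncCall" stands for any asynchronous client method.
     *
     *   final Semaphore sem = new Semaphore(0);
     *   final List<Throwable> failures = new ArrayList<>(1);
     *   asyncCall(new ActionListener<Response>() {
     *       @Override public void onResponse(Response r) { sem.release(); }
     *       @Override public void onFailure(Exception e) { failures.add(e); sem.release(); }
     *   });
     *   if (!sem.tryAcquire(2, TimeUnit.MINUTES)) {
     *       throw new ElasticsearchException("Timeout while waiting for the update");
     *   }
     *   if (!failures.isEmpty()) {
     *       throw failures.get(0);
     *   }
     */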
|
||||
|
||||
@Override
|
||||
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
|
||||
|
||||
// consume all parameters first so we can return a correct HTTP status,
|
||||
// not 400
|
||||
consumeParameters(request);
|
||||
|
||||
// TODO: - Initialize if non-existent
|
||||
// check if Security index has been initialized
|
||||
if (!ensureIndexExists(client)) {
|
||||
return channel -> channel.sendResponse(
|
||||
new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, ErrorType.OPENDISTRO_SECURITY_NOT_INITIALIZED.getMessage()));
|
||||
}
|
||||
|
||||
// check if request is authorized
|
||||
String authError = restApiPrivilegesEvaluator.checkAccessPermissions(request, getEndpoint());
|
||||
|
||||
if (authError != null) {
|
||||
logger.error("No permission to access REST API: " + authError);
|
||||
final User user = (User) threadPool.getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_USER);
|
||||
auditLog.logMissingPrivileges(authError, user==null?null:user.getName(), request);
|
||||
// for rest request
|
||||
request.params().clear();
|
||||
final BytesRestResponse response = (BytesRestResponse)forbidden("No permission to access REST API: " + authError).v2();
|
||||
return channel -> channel.sendResponse(response);
|
||||
}
|
||||
|
||||
final Semaphore sem = new Semaphore(0);
|
||||
final List<Throwable> exception = new ArrayList<Throwable>(1);
|
||||
final Tuple<String[], RestResponse> response;
|
||||
|
||||
final Object originalUser = threadPool.getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_USER);
|
||||
final Object originalRemoteAddress = threadPool.getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_REMOTE_ADDRESS);
|
||||
final Object originalOrigin = threadPool.getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_ORIGIN);
|
||||
|
||||
try (StoredContext ctx = threadPool.getThreadContext().stashContext()) {
|
||||
|
||||
threadPool.getThreadContext().putHeader(ConfigConstants.OPENDISTRO_SECURITY_CONF_REQUEST_HEADER, "true");
|
||||
threadPool.getThreadContext().putTransient(ConfigConstants.OPENDISTRO_SECURITY_USER, originalUser);
|
||||
threadPool.getThreadContext().putTransient(ConfigConstants.OPENDISTRO_SECURITY_REMOTE_ADDRESS, originalRemoteAddress);
|
||||
threadPool.getThreadContext().putTransient(ConfigConstants.OPENDISTRO_SECURITY_ORIGIN, originalOrigin);
|
||||
|
||||
response = handleApiRequest(request, client);
|
||||
|
||||
// reload config
|
||||
if (response.v1().length > 0) {
|
||||
|
||||
final ConfigUpdateRequest cur = new ConfigUpdateRequest(response.v1());
|
||||
// cur.putInContext(ConfigConstants.OPENDISTRO_SECURITY_USER,
|
||||
// new User((String)
|
||||
// request.getFromContext(ConfigConstants.OPENDISTRO_SECURITY_SSL_PRINCIPAL)));
|
||||
|
||||
client.execute(ConfigUpdateAction.INSTANCE, cur, new ActionListener<ConfigUpdateResponse>() {
|
||||
|
||||
@Override
|
||||
public void onFailure(final Exception e) {
|
||||
sem.release();
|
||||
logger.error("Cannot update {} due to", Arrays.toString(response.v1()), e);
|
||||
exception.add(e);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onResponse(final ConfigUpdateResponse ur) {
|
||||
sem.release();
|
||||
if (!checkConfigUpdateResponse(ur)) {
|
||||
logger.error("Cannot update {}", Arrays.toString(response.v1()));
|
||||
exception.add(
|
||||
new ElasticsearchException("Unable to update " + Arrays.toString(response.v1())));
|
||||
} else if (logger.isDebugEnabled()) {
|
||||
logger.debug("Configs {} successfully updated", Arrays.toString(response.v1()));
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
} else {
|
||||
sem.release();
|
||||
}
|
||||
|
||||
} catch (final Throwable e) {
|
||||
logger.error("Unexpected exception {}", e.toString(), e);
|
||||
request.params().clear();
|
||||
return channel -> channel
|
||||
.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.toString()));
|
||||
}
|
||||
|
||||
try {
|
||||
if (!sem.tryAcquire(2, TimeUnit.MINUTES)) {
|
||||
// timeout
|
||||
logger.error("Cannot update {} due to timeout", Arrays.toString(response.v1()));
|
||||
throw new ElasticsearchException("Timeout updating " + Arrays.toString(response.v1()));
|
||||
}
|
||||
} catch (final InterruptedException e) {
|
||||
Thread.currentThread().interrupt();
|
||||
}
|
||||
|
||||
if (exception.size() > 0) {
|
||||
request.params().clear();
|
||||
return channel -> channel
|
||||
.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, exception.get(0).toString()));
|
||||
}
|
||||
|
||||
return channel -> channel.sendResponse(response.v2());
|
||||
|
||||
}
|
||||
|
||||
protected static BytesReference toSource(final Settings.Builder settingsBuilder) throws IOException {
|
||||
final XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject(); // 1
|
||||
settingsBuilder.build().toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
builder.endObject(); // 2
|
||||
return BytesReference.bytes(builder);
|
||||
}
|
||||
|
||||
protected boolean checkConfigUpdateResponse(final ConfigUpdateResponse response) {
|
||||
|
||||
final int nodeCount = cs.state().getNodes().getNodes().size();
|
||||
final int expectedConfigCount = 1;
|
||||
|
||||
boolean success = response.getNodes().size() == nodeCount;
|
||||
if (!success) {
|
||||
logger.error(
|
||||
"Expected " + nodeCount + " nodes to return response, but got only " + response.getNodes().size());
|
||||
}
|
||||
|
||||
for (final String nodeId : response.getNodesMap().keySet()) {
|
||||
final ConfigUpdateNodeResponse node = response.getNodesMap().get(nodeId);
|
||||
final boolean successNode = node.getUpdatedConfigTypes() != null
|
||||
&& node.getUpdatedConfigTypes().length == expectedConfigCount;
|
||||
|
||||
if (!successNode) {
|
||||
logger.error("Expected " + expectedConfigCount + " config types for node " + nodeId + " but got only "
|
||||
+ Arrays.toString(node.getUpdatedConfigTypes()));
|
||||
}
|
||||
|
||||
success = success && successNode;
|
||||
}
|
||||
|
||||
return success;
|
||||
}
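    /*
     * Illustrative sketch (not taken from this file): for a 3-node cluster and one updated
     * config type, checkConfigUpdateResponse above expects, roughly,
     *
     *   response.getNodes().size() == 3
     *   response.getNodesMap().get(nodeId).getUpdatedConfigTypes().length == 1   // for every node
     *
     * Any node reporting a different number of updated config types marks the whole update
     * as failed and is logged as an error.
     */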
|
||||
|
||||
protected static XContentBuilder convertToJson(Settings settings) throws IOException {
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.prettyPrint();
|
||||
builder.startObject();
|
||||
settings.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected Tuple<String[], RestResponse> response(RestStatus status, String statusString, String message,
|
||||
String... configs) {
|
||||
|
||||
try {
|
||||
final XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
builder.field("status", statusString);
|
||||
builder.field("message", message);
|
||||
builder.endObject();
|
||||
String[] configsToUpdate = configs == null ? new String[0] : configs;
|
||||
return new Tuple<String[], RestResponse>(configsToUpdate, new BytesRestResponse(status, builder));
|
||||
} catch (IOException ex) {
|
||||
logger.error("Cannot build response", ex);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
protected Tuple<String[], RestResponse> successResponse(String message, String... configs) {
|
||||
return response(RestStatus.OK, RestStatus.OK.name(), message, configs);
|
||||
}
|
||||
|
||||
protected Tuple<String[], RestResponse> createdResponse(String message, String... configs) {
|
||||
return response(RestStatus.CREATED, RestStatus.CREATED.name(), message, configs);
|
||||
}
|
||||
|
||||
protected Tuple<String[], RestResponse> badRequestResponse(String message) {
|
||||
return response(RestStatus.BAD_REQUEST, RestStatus.BAD_REQUEST.name(), message);
|
||||
}
|
||||
|
||||
protected Tuple<String[], RestResponse> notFound(String message) {
|
||||
return response(RestStatus.NOT_FOUND, RestStatus.NOT_FOUND.name(), message);
|
||||
}
|
||||
|
||||
protected Tuple<String[], RestResponse> forbidden(String message) {
|
||||
return response(RestStatus.FORBIDDEN, RestStatus.FORBIDDEN.name(), message);
|
||||
}
|
||||
|
||||
protected Tuple<String[], RestResponse> internalErrorResponse(String message) {
|
||||
return response(RestStatus.INTERNAL_SERVER_ERROR, RestStatus.INTERNAL_SERVER_ERROR.name(), message);
|
||||
}
|
||||
|
||||
protected Tuple<String[], RestResponse> unprocessable(String message) {
|
||||
return response(RestStatus.UNPROCESSABLE_ENTITY, RestStatus.UNPROCESSABLE_ENTITY.name(), message);
|
||||
}
|
||||
|
||||
protected Tuple<String[], RestResponse> notImplemented(Method method) {
|
||||
return response(RestStatus.NOT_IMPLEMENTED, RestStatus.NOT_IMPLEMENTED.name(),
|
||||
"Method " + method.name() + " not supported for this action.");
|
||||
}
|
||||
|
||||
protected boolean isReadOnly(Settings settings, String resourceName) {
|
||||
return settings.getAsBoolean(resourceName+ "." + ConfigConstants.CONFIGKEY_READONLY, Boolean.FALSE);
|
||||
}
|
||||
|
||||
protected boolean isHidden(Settings settings, String resourceName) {
|
||||
return settings.getAsBoolean(resourceName+ "." + ConfigConstants.CONFIGKEY_HIDDEN, Boolean.FALSE);
|
||||
}
|
||||
|
||||
/**
|
||||
* Consume all defined parameters for the request. Before the request is
* handled in a subclass (where the parameters are actually needed), some global
* checks are performed, e.g. whether the Security index exists. If the
* parameter(s) were not consumed up front, ES would always answer those early
* returns with a 400 and an internal error message.
|
||||
*
|
||||
* @param request
|
||||
*/
|
||||
protected void consumeParameters(final RestRequest request) {
|
||||
request.param("name");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return getClass().getSimpleName();
|
||||
}
|
||||
|
||||
protected abstract Endpoint getEndpoint();
|
||||
|
||||
}
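
For orientation, a self-contained sketch of calling one of the REST API endpoints served by these handlers from plain Java. The host, port, credentials, and the omission of any TLS trust configuration are assumptions for illustration; the roles endpoint is only used as an example of the generic GET path implemented by handleGet above.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.Base64;

public class SecurityApiGetSketch {

    public static void main(String[] args) throws Exception {
        String auth = Base64.getEncoder().encodeToString("admin:admin".getBytes()); // assumed credentials

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("https://localhost:9200/_opendistro/_security/api/roles/")) // assumed host
                .header("Authorization", "Basic " + auth)
                .GET()
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        System.out.println(response.statusCode()); // 200, or 500 while the Security index is not initialized
        System.out.println(response.body());       // filtered configuration as JSON
    }
}
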
|
@ -0,0 +1,89 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.api;
|
||||
|
||||
import java.nio.file.Path;
|
||||
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.AdminDNs;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.IndexBaseConfigurationRepository;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.AbstractConfigurationValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.ActionGroupValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.privileges.PrivilegesEvaluator;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.transport.PrincipalExtractor;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
|
||||
public class ActionGroupsApiAction extends PatchableResourceApiAction {
|
||||
|
||||
@Inject
|
||||
public ActionGroupsApiAction(final Settings settings, final Path configPath, final RestController controller, final Client client,
|
||||
final AdminDNs adminDNs, final IndexBaseConfigurationRepository cl, final ClusterService cs,
|
||||
final PrincipalExtractor principalExtractor, final PrivilegesEvaluator evaluator, ThreadPool threadPool, AuditLog auditLog) {
|
||||
super(settings, configPath, controller, client, adminDNs, cl, cs, principalExtractor, evaluator, threadPool, auditLog);
|
||||
|
||||
// legacy mapping for backwards compatibility
|
||||
// TODO: remove in next version
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/actiongroup/{name}", this);
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/actiongroup/", this);
|
||||
controller.registerHandler(Method.DELETE, "/_opendistro/_security/api/actiongroup/{name}", this);
|
||||
controller.registerHandler(Method.PUT, "/_opendistro/_security/api/actiongroup/{name}", this);
|
||||
|
||||
// corrected mapping, introduced in Open Distro Security
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/actiongroups/{name}", this);
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/actiongroups/", this);
|
||||
controller.registerHandler(Method.DELETE, "/_opendistro/_security/api/actiongroups/{name}", this);
|
||||
controller.registerHandler(Method.PUT, "/_opendistro/_security/api/actiongroups/{name}", this);
|
||||
controller.registerHandler(Method.PATCH, "/_opendistro/_security/api/actiongroups/", this);
|
||||
controller.registerHandler(Method.PATCH, "/_opendistro/_security/api/actiongroups/{name}", this);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Endpoint getEndpoint() {
|
||||
return Endpoint.ACTIONGROUPS;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AbstractConfigurationValidator getValidator(final RestRequest request, BytesReference ref, Object... param) {
|
||||
return new ActionGroupValidator(request, ref, this.settings, param);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getResourceName() {
|
||||
return "actiongroup";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getConfigName() {
|
||||
return ConfigConstants.CONFIGNAME_ACTION_GROUPS;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void consumeParameters(final RestRequest request) {
|
||||
request.param("name");
|
||||
}
|
||||
|
||||
}
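
A short companion sketch showing how the handlers registered above are typically exercised; it deletes a single action group via the corrected actiongroups path. Host, credentials and the action group name are assumptions; a PUT to the same URL with a JSON body (validated by ActionGroupValidator) would create or update the entry instead.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.Base64;

public class ActionGroupDeleteSketch {

    public static void main(String[] args) throws Exception {
        String auth = Base64.getEncoder().encodeToString("admin:admin".getBytes()); // assumed credentials

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("https://localhost:9200/_opendistro/_security/api/actiongroups/MY_GROUP")) // assumed name
                .header("Authorization", "Basic " + auth)
                .DELETE()
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());

        // 200 "'MY_GROUP' deleted." on success, 404 if unknown or hidden, 403 if read-only
        System.out.println(response.statusCode() + " " + response.body());
    }
}
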
|
@ -0,0 +1,96 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.api;
|
||||
|
||||
import java.nio.file.Path;
|
||||
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.rest.BytesRestResponse;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
import org.elasticsearch.rest.RestResponse;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.AdminDNs;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.IndexBaseConfigurationRepository;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.AbstractConfigurationValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.NoOpValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.privileges.PrivilegesEvaluator;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.transport.PrincipalExtractor;
|
||||
|
||||
public class AuthTokenProcessorAction extends AbstractApiAction {
|
||||
@Inject
|
||||
public AuthTokenProcessorAction(final Settings settings, final Path configPath, final RestController controller,
|
||||
final Client client, final AdminDNs adminDNs, final IndexBaseConfigurationRepository cl,
|
||||
final ClusterService cs, final PrincipalExtractor principalExtractor, final PrivilegesEvaluator evaluator,
|
||||
ThreadPool threadPool, AuditLog auditLog) {
|
||||
super(settings, configPath, controller, client, adminDNs, cl, cs, principalExtractor, evaluator, threadPool,
|
||||
auditLog);
|
||||
|
||||
controller.registerHandler(Method.POST, "/_opendistro/_security/api/authtoken", this);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Tuple<String[], RestResponse> handlePost(final RestRequest request, final Client client,
|
||||
final Settings.Builder additionalSettings) throws Throwable {
|
||||
|
||||
// Just do nothing here. Eligible authenticators will intercept this call and
// provide their own responses.
|
||||
|
||||
return new Tuple<String[], RestResponse>(new String[0], new BytesRestResponse(RestStatus.OK, ""));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AbstractConfigurationValidator getValidator(RestRequest request, BytesReference ref, Object... param) {
|
||||
return new NoOpValidator(request, ref, this.settings, param);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getResourceName() {
|
||||
return "authtoken";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getConfigName() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Endpoint getEndpoint() {
|
||||
return Endpoint.AUTHTOKEN;
|
||||
}
|
||||
|
||||
|
||||
public static class Response {
|
||||
private String authorization;
|
||||
|
||||
public String getAuthorization() {
|
||||
return authorization;
|
||||
}
|
||||
|
||||
public void setAuthorization(String authorization) {
|
||||
this.authorization = authorization;
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,29 @@
|
||||
/*
 * Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */

package com.amazon.opendistroforelasticsearch.security.dlic.rest.api;

public enum Endpoint {
    ACTIONGROUPS,
    CACHE,
    CONFIGURATION,
    CONFIG,
    ROLES,
    ROLESMAPPING,
    INTERNALUSERS,
    SYSTEMINFO,
    PERMISSIONSINFO,
    AUTHTOKEN;
}
|
@ -0,0 +1,150 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.api;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.concurrent.Semaphore;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.Settings.Builder;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
import org.elasticsearch.rest.RestResponse;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.action.configupdate.ConfigUpdateAction;
|
||||
import com.amazon.opendistroforelasticsearch.security.action.configupdate.ConfigUpdateRequest;
|
||||
import com.amazon.opendistroforelasticsearch.security.action.configupdate.ConfigUpdateResponse;
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.AdminDNs;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.IndexBaseConfigurationRepository;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.AbstractConfigurationValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.NoOpValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.privileges.PrivilegesEvaluator;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.transport.PrincipalExtractor;
|
||||
|
||||
public class FlushCacheApiAction extends AbstractApiAction {
|
||||
|
||||
@Inject
|
||||
public FlushCacheApiAction(final Settings settings, final Path configPath, final RestController controller, final Client client,
|
||||
final AdminDNs adminDNs, final IndexBaseConfigurationRepository cl, final ClusterService cs,
|
||||
final PrincipalExtractor principalExtractor, final PrivilegesEvaluator evaluator, ThreadPool threadPool, AuditLog auditLog) {
|
||||
super(settings, configPath, controller, client, adminDNs, cl, cs, principalExtractor, evaluator, threadPool, auditLog);
|
||||
controller.registerHandler(Method.DELETE, "/_opendistro/_security/api/cache", this);
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/cache", this);
|
||||
controller.registerHandler(Method.PUT, "/_opendistro/_security/api/cache", this);
|
||||
controller.registerHandler(Method.POST, "/_opendistro/_security/api/cache", this);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Endpoint getEndpoint() {
|
||||
return Endpoint.CACHE;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Tuple<String[], RestResponse> handleDelete(RestRequest request, Client client, Builder additionalSettingsBuilder)
|
||||
throws Throwable {
|
||||
|
||||
final Semaphore sem = new Semaphore(0);
|
||||
final List<Throwable> exception = new ArrayList<Throwable>(1);
|
||||
|
||||
client.execute(
|
||||
ConfigUpdateAction.INSTANCE,
|
||||
new ConfigUpdateRequest(new String[] { "config", "roles", "rolesmapping", "internalusers", "actiongroups" }),
|
||||
new ActionListener<ConfigUpdateResponse>() {
|
||||
|
||||
@Override
|
||||
public void onResponse(ConfigUpdateResponse response) {
|
||||
sem.release();
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("cache flushed successfully");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFailure(Exception e) {
|
||||
sem.release();
|
||||
exception.add(e);
|
||||
logger.error("Cannot flush cache due to {}", e.toString(), e);
|
||||
}
|
||||
|
||||
}
|
||||
);
|
||||
|
||||
if (!sem.tryAcquire(30, TimeUnit.SECONDS)) {
|
||||
logger.error("Cannot flush cache due to timeout");
|
||||
return internalErrorResponse("Cannot flush cache due to timeout");
|
||||
}
|
||||
|
||||
if (exception.size() > 0) {
|
||||
logger.error("Cannot flush cache due to", exception.get(0));
|
||||
return internalErrorResponse("Cannot flush cache due to "+ exception.get(0).getMessage());
|
||||
}
|
||||
|
||||
return successResponse("Cache flushed successfully.", new String[0]);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Tuple<String[], RestResponse> handlePost(final RestRequest request, final Client client,
|
||||
final Settings.Builder additionalSettings) throws Throwable {
|
||||
return notImplemented(Method.POST);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Tuple<String[], RestResponse> handleGet(final RestRequest request, final Client client,
|
||||
final Settings.Builder additionalSettings) throws Throwable {
|
||||
return notImplemented(Method.GET);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Tuple<String[], RestResponse> handlePut(final RestRequest request, final Client client,
|
||||
final Settings.Builder additionalSettings) throws Throwable {
|
||||
return notImplemented(Method.PUT);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AbstractConfigurationValidator getValidator(RestRequest request, BytesReference ref, Object... param) {
|
||||
return new NoOpValidator(request, ref, this.settings, param);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getResourceName() {
|
||||
// not needed
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getConfigName() {
|
||||
// not needed
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void consumeParameters(final RestRequest request) {
|
||||
// not needed
|
||||
}
|
||||
|
||||
}
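// A minimal, self-contained sketch (not part of the original commit) showing how the cache
// flush endpoint registered above could be called with the Elasticsearch low-level REST
// client. Host, port, scheme and authentication are assumptions about the local cluster.
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class FlushCacheExample {
    public static void main(String[] args) throws Exception {
        try (RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200, "https")).build()) {
            // DELETE /_opendistro/_security/api/cache triggers the ConfigUpdateAction handled above
            Response response = restClient.performRequest(new Request("DELETE", "/_opendistro/_security/api/cache"));
            System.out.println(response.getStatusLine());
        }
    }
}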
|
@ -0,0 +1,126 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.api;
|
||||
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.util.Set;
|
||||
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.rest.BytesRestResponse;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
import org.elasticsearch.rest.RestResponse;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.AdminDNs;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.IndexBaseConfigurationRepository;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.AbstractConfigurationValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.NoOpValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.privileges.PrivilegesEvaluator;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.transport.PrincipalExtractor;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
import com.google.common.base.Joiner;
|
||||
|
||||
/**
|
||||
* @deprecated Use GET endpoints without resource ID in resource specific endpoints, e.g. _opendistro/_security/api/roles/
|
||||
* Will be removed in next version.
|
||||
*/
|
||||
public class GetConfigurationApiAction extends AbstractApiAction {
|
||||
|
||||
@Inject
|
||||
public GetConfigurationApiAction(final Settings settings, final Path configPath, final RestController controller, final Client client,
|
||||
final AdminDNs adminDNs, final IndexBaseConfigurationRepository cl, final ClusterService cs,
|
||||
final PrincipalExtractor principalExtractor, final PrivilegesEvaluator evaluator, ThreadPool threadPool, AuditLog auditLog) {
|
||||
super(settings, configPath, controller, client, adminDNs, cl, cs, principalExtractor, evaluator, threadPool, auditLog);
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/configuration/{configname}", this);
|
||||
System.out.println("Registering Handler");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Endpoint getEndpoint() {
|
||||
return Endpoint.CONFIGURATION;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Tuple<String[], RestResponse> handleGet(RestRequest request, Client client,
|
||||
final Settings.Builder additionalSettingsBuilder) throws Throwable {
|
||||
|
||||
final String configname = request.param("configname");
|
||||
|
||||
if (configname == null || configname.length() == 0
|
||||
|| !ConfigConstants.CONFIG_NAMES.contains(configname)) {
|
||||
return badRequestResponse("No configuration name given, must be one of "
|
||||
+ Joiner.on(",").join(ConfigConstants.CONFIG_NAMES));
|
||||
|
||||
}
|
||||
final Settings.Builder configBuilder = load(configname, true);
|
||||
filter(configBuilder, configname);
|
||||
final Settings config = configBuilder.build();
|
||||
|
||||
return new Tuple<String[], RestResponse>(new String[0],
|
||||
new BytesRestResponse(RestStatus.OK, convertToJson(config)));
|
||||
}
|
||||
|
||||
protected void filter(Settings.Builder builder, String resourceName) {
|
||||
// common filtering
|
||||
filter(builder);
|
||||
// filter sensitive resources for internal users
|
||||
if (resourceName.equals("internalusers")) {
|
||||
filterHashes(builder);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AbstractConfigurationValidator getValidator(RestRequest request, BytesReference ref, Object... param) {
|
||||
return new NoOpValidator(request, ref, this.settings, param);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getResourceName() {
|
||||
// GET is handled by this class directly
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getConfigName() {
|
||||
// GET is handled by this class directly
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void consumeParameters(final RestRequest request) {
|
||||
request.param("configname");
|
||||
}
|
||||
|
||||
private void filterHashes(Settings.Builder builder) {
|
||||
// replace password hashes in addition. We must not remove them from the
|
||||
// Builder since this would remove users completely if they
|
||||
// do not have any additional properties like roles or attributes
|
||||
Set<String> entries = builder.build().getAsGroups().keySet();
|
||||
for (String key : entries) {
|
||||
builder.put(key + ".hash", "");
|
||||
}
|
||||
}
|
||||
}
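// A brief sketch (not part of the original commit) contrasting the deprecated endpoint
// registered above with the resource-specific endpoint its Javadoc recommends;
// "restClient" is an assumed, already configured org.elasticsearch.client.RestClient
// (see the FlushCacheExample earlier) and "roles" is only an example config name:
//
//   // deprecated, will be removed: config name as a path parameter
//   restClient.performRequest(new Request("GET", "/_opendistro/_security/api/configuration/roles"));
//   // preferred: dedicated resource endpoint
//   restClient.performRequest(new Request("GET", "/_opendistro/_security/api/roles/"));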
|
@ -0,0 +1,225 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.api;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.security.SecureRandom;
|
||||
import java.util.Arrays;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.Set;
|
||||
|
||||
import org.bouncycastle.crypto.generators.OpenBSDBCrypt;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
import org.elasticsearch.rest.RestResponse;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import com.fasterxml.jackson.databind.node.TextNode;
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.AdminDNs;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.IndexBaseConfigurationRepository;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.support.Utils;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.AbstractConfigurationValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.InternalUsersValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.privileges.PrivilegesEvaluator;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.transport.PrincipalExtractor;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
|
||||
public class InternalUsersApiAction extends PatchableResourceApiAction {
|
||||
|
||||
@Inject
|
||||
public InternalUsersApiAction(final Settings settings, final Path configPath, final RestController controller,
|
||||
final Client client, final AdminDNs adminDNs, final IndexBaseConfigurationRepository cl,
|
||||
final ClusterService cs, final PrincipalExtractor principalExtractor, final PrivilegesEvaluator evaluator,
|
||||
ThreadPool threadPool, AuditLog auditLog) {
|
||||
super(settings, configPath, controller, client, adminDNs, cl, cs, principalExtractor, evaluator, threadPool,
|
||||
auditLog);
|
||||
|
||||
// legacy mapping for backwards compatibility
|
||||
// TODO: remove in next version
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/user/{name}", this);
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/user/", this);
|
||||
controller.registerHandler(Method.DELETE, "/_opendistro/_security/api/user/{name}", this);
|
||||
controller.registerHandler(Method.PUT, "/_opendistro/_security/api/user/{name}", this);
|
||||
|
||||
// corrected mapping, introduced in Open Distro Security
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/internalusers/{name}", this);
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/internalusers/", this);
|
||||
controller.registerHandler(Method.DELETE, "/_opendistro/_security/api/internalusers/{name}", this);
|
||||
controller.registerHandler(Method.PUT, "/_opendistro/_security/api/internalusers/{name}", this);
|
||||
controller.registerHandler(Method.PATCH, "/_opendistro/_security/api/internalusers/", this);
|
||||
controller.registerHandler(Method.PATCH, "/_opendistro/_security/api/internalusers/{name}", this);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Endpoint getEndpoint() {
|
||||
return Endpoint.INTERNALUSERS;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Tuple<String[], RestResponse> handlePut(final RestRequest request, final Client client,
|
||||
final Settings.Builder additionalSettingsBuilder) throws Throwable {
|
||||
|
||||
final String username = request.param("name");
|
||||
|
||||
if (username == null || username.length() == 0) {
|
||||
return badRequestResponse("No " + getResourceName() + " specified");
|
||||
}
|
||||
|
||||
if(username.contains(".")) {
|
||||
return badRequestResponse("No dots are allowed in the name. User the username attribute.");
|
||||
}
|
||||
|
||||
// TODO it might be sensible to consolidate this with the overridden method in
|
||||
// order to minimize duplicated logic
|
||||
|
||||
final Settings configurationSettings = loadAsSettings(getConfigName(), false);
|
||||
|
||||
if (isHidden(configurationSettings, username)) {
|
||||
return forbidden("Resource '" + username + "' is not available.");
|
||||
}
|
||||
|
||||
// check if resource is writeable
|
||||
Boolean readOnly = configurationSettings.getAsBoolean(username + "." + ConfigConstants.CONFIGKEY_READONLY,
|
||||
Boolean.FALSE);
|
||||
if (readOnly) {
|
||||
return forbidden("Resource '" + username + "' is read-only.");
|
||||
}
|
||||
|
||||
// if password is set, it takes precedence over hash
|
||||
String plainTextPassword = additionalSettingsBuilder.get("password");
|
||||
if (plainTextPassword != null && plainTextPassword.length() > 0) {
|
||||
additionalSettingsBuilder.remove("password");
|
||||
additionalSettingsBuilder.put("hash", hash(plainTextPassword.toCharArray()));
|
||||
}
|
||||
|
||||
// check if user exists
|
||||
final Settings.Builder internaluser = load(ConfigConstants.CONFIGNAME_INTERNAL_USERS, false);
|
||||
final Map<String, Object> config = Utils.convertJsonToxToStructuredMap(internaluser.build());
|
||||
|
||||
final boolean userExisted = config.containsKey(username);
|
||||
|
||||
// when updating an existing user password hash can be blank, which means no
|
||||
// changes
|
||||
|
||||
// sanity checks, hash is mandatory for newly created users
|
||||
if (!userExisted && additionalSettingsBuilder.get("hash") == null) {
|
||||
return badRequestResponse("Please specify either 'hash' or 'password' when creating a new internal user");
|
||||
}
|
||||
|
||||
// for existing users, hash is optional
|
||||
if (userExisted && additionalSettingsBuilder.get("hash") == null) {
|
||||
// sanity check, this should usually not happen
|
||||
@SuppressWarnings("unchecked")
|
||||
Map<String, String> existingUserSettings = (Map<String, String>) config.get(username);
|
||||
if (!existingUserSettings.containsKey("hash")) {
|
||||
return internalErrorResponse(
|
||||
"Existing user " + username + " has no password, and no new password or hash was specified");
|
||||
}
|
||||
additionalSettingsBuilder.put("hash", (String) existingUserSettings.get("hash"));
|
||||
}
|
||||
|
||||
config.remove(username);
|
||||
|
||||
// checks complete, create or update the user
|
||||
config.put(username, Utils.convertJsonToxToStructuredMap(additionalSettingsBuilder.build()));
|
||||
|
||||
save(client, request, ConfigConstants.CONFIGNAME_INTERNAL_USERS, Utils.convertStructuredMapToBytes(config));
|
||||
|
||||
if (userExisted) {
|
||||
return successResponse("'" + username + "' updated", ConfigConstants.CONFIGNAME_INTERNAL_USERS);
|
||||
} else {
|
||||
return createdResponse("'" + username + "' created", ConfigConstants.CONFIGNAME_INTERNAL_USERS);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void filter(Settings.Builder builder) {
|
||||
super.filter(builder);
|
||||
// replace password hashes in addition. We must not remove them from the
|
||||
// Builder since this would remove users completely if they
|
||||
// do not have any additional properties like roles or attributes
|
||||
Set<String> entries = builder.build().getAsGroups().keySet();
|
||||
for (String key : entries) {
|
||||
builder.put(key + ".hash", "");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AbstractConfigurationValidator postProcessApplyPatchResult(RestRequest request, JsonNode existingResourceAsJsonNode,
|
||||
JsonNode updatedResourceAsJsonNode, String resourceName) {
|
||||
AbstractConfigurationValidator retVal = null;
|
||||
JsonNode passwordNode = updatedResourceAsJsonNode.get("password");
|
||||
|
||||
if (passwordNode != null) {
|
||||
String plainTextPassword = passwordNode.asText();
|
||||
try {
|
||||
XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent);
|
||||
builder.startObject();
|
||||
builder.field("password", plainTextPassword);
|
||||
builder.endObject();
|
||||
retVal = getValidator(request, BytesReference.bytes(builder), resourceName);
|
||||
} catch (IOException e) {
|
||||
log.error(e);
|
||||
}
|
||||
|
||||
((ObjectNode) updatedResourceAsJsonNode).remove("password");
|
||||
((ObjectNode) updatedResourceAsJsonNode).set("hash", new TextNode(hash(plainTextPassword.toCharArray())));
|
||||
return retVal;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
public static String hash(final char[] clearTextPassword) {
|
||||
final byte[] salt = new byte[16];
|
||||
new SecureRandom().nextBytes(salt);
|
||||
final String hash = OpenBSDBCrypt.generate((Objects.requireNonNull(clearTextPassword)), salt, 12);
|
||||
Arrays.fill(salt, (byte) 0);
|
||||
Arrays.fill(clearTextPassword, '\0');
|
||||
return hash;
|
||||
}
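// A minimal sketch (not part of the original commit) showing how a hash produced by
// hash(char[]) above could be verified, e.g. at login time; the method name is hypothetical.
static boolean verifyHashExample(final String storedHash, final char[] candidatePassword) {
    // OpenBSDBCrypt re-derives the hash from the salt and cost factor embedded in storedHash
    return OpenBSDBCrypt.checkPassword(storedHash, candidatePassword);
}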
|
||||
|
||||
@Override
|
||||
protected String getResourceName() {
|
||||
return "user";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getConfigName() {
|
||||
return ConfigConstants.CONFIGNAME_INTERNAL_USERS;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AbstractConfigurationValidator getValidator(RestRequest request, BytesReference ref, Object... params) {
|
||||
return new InternalUsersValidator(request, ref, this.settings, params);
|
||||
}
|
||||
}
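// A brief sketch (not part of the original commit) of creating an internal user through
// the endpoints registered above; "restClient" is an assumed, already configured
// org.elasticsearch.client.RestClient and the user data is purely illustrative.
//
//   Request putUser = new Request("PUT", "/_opendistro/_security/api/internalusers/jdoe");
//   putUser.setJsonEntity("{\"password\": \"correcthorsebatterystaple\", \"roles\": [\"readall\"]}");
//   Response created = restClient.performRequest(putUser);
//
// Because "password" is supplied, handlePut() above removes it and stores a bcrypt
// "hash" computed by hash(char[]) before the internal users configuration is saved.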
|
@ -0,0 +1,98 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.api;
|
||||
|
||||
import java.nio.file.Path;
|
||||
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.rest.BytesRestResponse;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
import org.elasticsearch.rest.RestResponse;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.AdminDNs;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.IndexBaseConfigurationRepository;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.AbstractConfigurationValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.NoOpValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.privileges.PrivilegesEvaluator;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.transport.PrincipalExtractor;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
|
||||
public class OpenDistroSecurityConfigAction extends AbstractApiAction {
|
||||
|
||||
@Inject
|
||||
public OpenDistroSecurityConfigAction(final Settings settings, final Path configPath, final RestController controller, final Client client,
|
||||
final AdminDNs adminDNs, final IndexBaseConfigurationRepository cl, final ClusterService cs,
|
||||
final PrincipalExtractor principalExtractor, final PrivilegesEvaluator evaluator, ThreadPool threadPool, AuditLog auditLog) {
|
||||
super(settings, configPath, controller, client, adminDNs, cl, cs, principalExtractor, evaluator, threadPool, auditLog);
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/config/", this);
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
protected Tuple<String[], RestResponse> handleGet(RestRequest request, Client client,
|
||||
final Settings.Builder additionalSettingsBuilder) throws Throwable {
|
||||
|
||||
final Settings configurationSettings = loadAsSettings(getConfigName(), true);
|
||||
|
||||
return new Tuple<String[], RestResponse>(new String[0],
|
||||
new BytesRestResponse(RestStatus.OK, convertToJson(configurationSettings)));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Tuple<String[], RestResponse> handlePut(final RestRequest request, final Client client,
|
||||
final Settings.Builder additionalSettings) throws Throwable {
|
||||
return notImplemented(Method.PUT);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Tuple<String[], RestResponse> handleDelete(final RestRequest request, final Client client,
|
||||
final Settings.Builder additionalSettings) throws Throwable {
|
||||
return notImplemented(Method.DELETE);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AbstractConfigurationValidator getValidator(RestRequest request, BytesReference ref, Object... param) {
|
||||
return new NoOpValidator(request, ref, this.settings, param);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getConfigName() {
|
||||
return ConfigConstants.CONFIGNAME_CONFIG;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Endpoint getEndpoint() {
|
||||
return Endpoint.CONFIG;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getResourceName() {
|
||||
// not needed, no single resource
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,55 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.api;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestHandler;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.AdminDNs;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.IndexBaseConfigurationRepository;
|
||||
import com.amazon.opendistroforelasticsearch.security.privileges.PrivilegesEvaluator;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.transport.PrincipalExtractor;
|
||||
|
||||
public class OpenDistroSecurityRestApiActions {
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
public static Collection<RestHandler> getHandler(Settings settings, Path configPath, RestController controller, Client client,
|
||||
AdminDNs adminDns, IndexBaseConfigurationRepository cr, ClusterService cs, PrincipalExtractor principalExtractor,
|
||||
final PrivilegesEvaluator evaluator, ThreadPool threadPool, AuditLog auditLog) {
|
||||
final List<RestHandler> handlers = new ArrayList<RestHandler>(9);
|
||||
handlers.add(new InternalUsersApiAction(settings, configPath, controller, client, adminDns, cr, cs, principalExtractor, evaluator, threadPool, auditLog));
|
||||
handlers.add(new RolesMappingApiAction(settings, configPath, controller, client, adminDns, cr, cs, principalExtractor, evaluator, threadPool, auditLog));
|
||||
handlers.add(new RolesApiAction(settings, configPath, controller, client, adminDns, cr, cs, principalExtractor, evaluator, threadPool, auditLog));
|
||||
handlers.add(new ActionGroupsApiAction(settings, configPath, controller, client, adminDns, cr, cs, principalExtractor, evaluator, threadPool, auditLog));
|
||||
handlers.add(new GetConfigurationApiAction(settings, configPath, controller, client, adminDns, cr, cs, principalExtractor, evaluator, threadPool, auditLog));
|
||||
handlers.add(new FlushCacheApiAction(settings, configPath, controller, client, adminDns, cr, cs, principalExtractor, evaluator, threadPool, auditLog));
|
||||
handlers.add(new OpenDistroSecurityConfigAction(settings, configPath, controller, client, adminDns, cr, cs, principalExtractor, evaluator, threadPool, auditLog));
|
||||
handlers.add(new PermissionsInfoAction(settings, configPath, controller, client, adminDns, cr, cs, principalExtractor, evaluator, threadPool, auditLog));
|
||||
handlers.add(new AuthTokenProcessorAction(settings, configPath, controller, client, adminDns, cr, cs, principalExtractor, evaluator, threadPool, auditLog));
|
||||
return Collections.unmodifiableCollection(handlers);
|
||||
}
|
||||
}
|
@ -0,0 +1,243 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.api;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.util.Iterator;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.rest.BytesRestResponse;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
import org.elasticsearch.rest.RestResponse;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.DefaultObjectMapper;
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.AdminDNs;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.IndexBaseConfigurationRepository;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.support.Utils;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.AbstractConfigurationValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.privileges.PrivilegesEvaluator;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.transport.PrincipalExtractor;
|
||||
import com.fasterxml.jackson.core.JsonParseException;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import com.flipkart.zjsonpatch.JsonPatch;
|
||||
import com.flipkart.zjsonpatch.JsonPatchApplicationException;
|
||||
|
||||
public abstract class PatchableResourceApiAction extends AbstractApiAction {
|
||||
|
||||
protected final Logger log = LogManager.getLogger(this.getClass());
|
||||
|
||||
public PatchableResourceApiAction(Settings settings, Path configPath, RestController controller, Client client,
|
||||
AdminDNs adminDNs, IndexBaseConfigurationRepository cl, ClusterService cs,
|
||||
PrincipalExtractor principalExtractor, PrivilegesEvaluator evaluator, ThreadPool threadPool,
|
||||
AuditLog auditLog) {
|
||||
super(settings, configPath, controller, client, adminDNs, cl, cs, principalExtractor, evaluator, threadPool,
|
||||
auditLog);
|
||||
}
|
||||
|
||||
private Tuple<String[], RestResponse> handlePatch(final RestRequest request, final Client client)
|
||||
throws Throwable {
|
||||
if (request.getXContentType() != XContentType.JSON) {
|
||||
return badRequestResponse("PATCH accepts only application/json");
|
||||
}
|
||||
|
||||
String name = request.param("name");
|
||||
Settings existingAsSettings = loadAsSettings(getConfigName(), false);
|
||||
|
||||
JsonNode jsonPatch;
|
||||
|
||||
try {
|
||||
jsonPatch = DefaultObjectMapper.objectMapper.readTree(request.content().utf8ToString());
|
||||
} catch (JsonParseException e) {
|
||||
log.debug("Error while parsing JSON patch", e);
|
||||
return badRequestResponse("Error in JSON patch: " + e.getMessage());
|
||||
}
|
||||
|
||||
JsonNode existingAsJsonNode = Utils.convertJsonToJackson(existingAsSettings);
|
||||
|
||||
if (!(existingAsJsonNode instanceof ObjectNode)) {
|
||||
return internalErrorResponse("Config " + getConfigName() + " is malformed");
|
||||
}
|
||||
|
||||
ObjectNode existingAsObjectNode = (ObjectNode) existingAsJsonNode;
|
||||
|
||||
if (Strings.isNullOrEmpty(name)) {
|
||||
return handleBulkPatch(request, client, existingAsSettings, existingAsObjectNode, jsonPatch);
|
||||
} else {
|
||||
return handleSinglePatch(request, client, name, existingAsSettings, existingAsObjectNode, jsonPatch);
|
||||
}
|
||||
}
|
||||
|
||||
private Tuple<String[], RestResponse> handleSinglePatch(RestRequest request, Client client, String name,
|
||||
Settings existingAsSettings, ObjectNode existingAsObjectNode, JsonNode jsonPatch) throws Throwable {
|
||||
if (isHidden(existingAsSettings, name)) {
|
||||
return notFound(getResourceName() + " " + name + " not found.");
|
||||
}
|
||||
|
||||
if (isReadOnly(existingAsSettings, name)) {
|
||||
return forbidden("Resource '" + name + "' is read-only.");
|
||||
}
|
||||
|
||||
Settings resourceSettings = existingAsSettings.getAsSettings(name);
|
||||
|
||||
if (resourceSettings.isEmpty()) {
|
||||
return notFound(getResourceName() + " " + name + " not found.");
|
||||
}
|
||||
|
||||
JsonNode existingResourceAsJsonNode = existingAsObjectNode.get(name);
|
||||
|
||||
JsonNode patchedResourceAsJsonNode;
|
||||
|
||||
try {
|
||||
patchedResourceAsJsonNode = applyPatch(jsonPatch, existingResourceAsJsonNode);
|
||||
} catch (JsonPatchApplicationException e) {
|
||||
log.debug("Error while applying JSON patch", e);
|
||||
return badRequestResponse(e.getMessage());
|
||||
}
|
||||
|
||||
AbstractConfigurationValidator originalValidator = postProcessApplyPatchResult(request, existingResourceAsJsonNode, patchedResourceAsJsonNode, name);
|
||||
|
||||
if(originalValidator != null) {
|
||||
if (!originalValidator.validateSettings()) {
|
||||
request.params().clear();
|
||||
return new Tuple<String[], RestResponse>(new String[0],
|
||||
new BytesRestResponse(RestStatus.BAD_REQUEST, originalValidator.errorsAsXContent()));
|
||||
}
|
||||
}
|
||||
|
||||
AbstractConfigurationValidator validator = getValidator(request, patchedResourceAsJsonNode);
|
||||
|
||||
if (!validator.validateSettings()) {
|
||||
request.params().clear();
|
||||
return new Tuple<String[], RestResponse>(new String[0],
|
||||
new BytesRestResponse(RestStatus.BAD_REQUEST, validator.errorsAsXContent()));
|
||||
}
|
||||
|
||||
JsonNode updatedAsJsonNode = existingAsObjectNode.deepCopy().set(name, patchedResourceAsJsonNode);
|
||||
|
||||
BytesReference updatedAsBytesReference = new BytesArray(
|
||||
DefaultObjectMapper.objectMapper.writeValueAsString(updatedAsJsonNode).getBytes());
|
||||
|
||||
save(client, request, getConfigName(), updatedAsBytesReference);
|
||||
|
||||
return successResponse("'" + name + "' updated.", getConfigName());
|
||||
}
|
||||
|
||||
private Tuple<String[], RestResponse> handleBulkPatch(RestRequest request, Client client,
|
||||
Settings existingAsSettings, ObjectNode existingAsObjectNode, JsonNode jsonPatch) throws Throwable {
|
||||
|
||||
JsonNode patchedAsJsonNode;
|
||||
|
||||
try {
|
||||
patchedAsJsonNode = applyPatch(jsonPatch, existingAsObjectNode);
|
||||
} catch (JsonPatchApplicationException e) {
|
||||
log.debug("Error while applying JSON patch", e);
|
||||
return badRequestResponse(e.getMessage());
|
||||
}
|
||||
|
||||
for (String resourceName : existingAsSettings.names()) {
|
||||
JsonNode oldResource = existingAsObjectNode.get(resourceName);
|
||||
JsonNode patchedResource = patchedAsJsonNode.get(resourceName);
|
||||
|
||||
if (oldResource != null && !oldResource.equals(patchedResource)) {
|
||||
|
||||
if (isReadOnly(existingAsSettings, resourceName)) {
|
||||
return forbidden("Resource '" + resourceName + "' is read-only.");
|
||||
}
|
||||
|
||||
if (isHidden(existingAsSettings, resourceName)) {
|
||||
return badRequestResponse("Resource name '" + resourceName + "' is reserved");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (Iterator<String> fieldNamesIter = patchedAsJsonNode.fieldNames(); fieldNamesIter.hasNext();) {
|
||||
String resourceName = fieldNamesIter.next();
|
||||
|
||||
JsonNode oldResource = existingAsObjectNode.get(resourceName);
|
||||
JsonNode patchedResource = patchedAsJsonNode.get(resourceName);
|
||||
|
||||
AbstractConfigurationValidator originalValidator = postProcessApplyPatchResult(request, oldResource, patchedResource, resourceName);
|
||||
|
||||
if(originalValidator != null) {
|
||||
if (!originalValidator.validateSettings()) {
|
||||
request.params().clear();
|
||||
return new Tuple<String[], RestResponse>(new String[0],
|
||||
new BytesRestResponse(RestStatus.BAD_REQUEST, originalValidator.errorsAsXContent()));
|
||||
}
|
||||
}
|
||||
|
||||
if (oldResource == null || !oldResource.equals(patchedResource)) {
|
||||
AbstractConfigurationValidator validator = getValidator(request, patchedResource);
|
||||
|
||||
if (!validator.validateSettings()) {
|
||||
request.params().clear();
|
||||
return new Tuple<String[], RestResponse>(new String[0],
|
||||
new BytesRestResponse(RestStatus.BAD_REQUEST, validator.errorsAsXContent()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
BytesReference updatedAsBytesReference = new BytesArray(
|
||||
DefaultObjectMapper.objectMapper.writeValueAsString(patchedAsJsonNode).getBytes());
|
||||
|
||||
save(client, request, getConfigName(), updatedAsBytesReference);
|
||||
|
||||
return successResponse("Resource updated.", getConfigName());
|
||||
}
|
||||
|
||||
private JsonNode applyPatch(JsonNode jsonPatch, JsonNode existingResourceAsJsonNode) {
|
||||
return JsonPatch.apply(jsonPatch, existingResourceAsJsonNode);
|
||||
}
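// A minimal sketch (not part of the original commit) of the RFC 6902 patch format consumed
// by handlePatch(), applied through the same zjsonpatch call as applyPatch() above; the
// resource content and the patch operation are purely illustrative.
static JsonNode applyPatchExample() throws Exception {
    JsonNode user = DefaultObjectMapper.objectMapper.readTree("{\"roles\":[\"readall\"]}");
    JsonNode patch = DefaultObjectMapper.objectMapper
            .readTree("[{\"op\":\"add\",\"path\":\"/roles/-\",\"value\":\"kibanauser\"}]");
    // result: {"roles":["readall","kibanauser"]}
    return JsonPatch.apply(patch, user);
}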
|
||||
|
||||
protected AbstractConfigurationValidator postProcessApplyPatchResult(RestRequest request, JsonNode existingResourceAsJsonNode, JsonNode updatedResourceAsJsonNode, String resourceName) {
|
||||
// do nothing by default
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Tuple<String[], RestResponse> handleApiRequest(final RestRequest request, final Client client)
|
||||
throws Throwable {
|
||||
|
||||
if (request.method() == Method.PATCH) {
|
||||
return handlePatch(request, client);
|
||||
} else {
|
||||
return super.handleApiRequest(request, client);
|
||||
}
|
||||
}
|
||||
|
||||
private AbstractConfigurationValidator getValidator(RestRequest request, JsonNode patchedResource)
|
||||
throws JsonProcessingException {
|
||||
BytesReference patchedResourceAsByteReference = new BytesArray(
|
||||
DefaultObjectMapper.objectMapper.writeValueAsString(patchedResource).getBytes());
|
||||
return getValidator(request, patchedResourceAsByteReference);
|
||||
}
|
||||
}
|
@ -0,0 +1,130 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.api;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.client.node.NodeClient;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.transport.TransportAddress;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.rest.BaseRestHandler;
|
||||
import org.elasticsearch.rest.BytesRestResponse;
|
||||
import org.elasticsearch.rest.RestChannel;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.AdminDNs;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.IndexBaseConfigurationRepository;
|
||||
import com.amazon.opendistroforelasticsearch.security.privileges.PrivilegesEvaluator;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.transport.PrincipalExtractor;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.User;
|
||||
|
||||
/**
|
||||
* Provides the evaluated REST API permissions for the currently logged in user
|
||||
*/
|
||||
public class PermissionsInfoAction extends BaseRestHandler {
|
||||
|
||||
private final RestApiPrivilegesEvaluator restApiPrivilegesEvaluator;
|
||||
private final ThreadPool threadPool;
|
||||
private final PrivilegesEvaluator privilegesEvaluator;
|
||||
|
||||
protected PermissionsInfoAction(final Settings settings, final Path configPath, final RestController controller, final Client client,
|
||||
final AdminDNs adminDNs, final IndexBaseConfigurationRepository cl, final ClusterService cs,
|
||||
final PrincipalExtractor principalExtractor, final PrivilegesEvaluator privilegesEvaluator, ThreadPool threadPool, AuditLog auditLog) {
|
||||
super(settings);
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/permissionsinfo", this);
|
||||
this.threadPool = threadPool;
|
||||
this.privilegesEvaluator = privilegesEvaluator;
|
||||
this.restApiPrivilegesEvaluator = new RestApiPrivilegesEvaluator(settings, adminDNs, privilegesEvaluator, principalExtractor, configPath, threadPool);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return getClass().getSimpleName();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
|
||||
switch (request.method()) {
|
||||
case GET:
|
||||
return handleGet(request, client);
|
||||
default:
|
||||
throw new IllegalArgumentException(request.method() + " not supported");
|
||||
}
|
||||
}
|
||||
|
||||
private RestChannelConsumer handleGet(RestRequest request, NodeClient client) throws IOException {
|
||||
|
||||
return new RestChannelConsumer() {
|
||||
|
||||
@Override
|
||||
public void accept(RestChannel channel) throws Exception {
|
||||
XContentBuilder builder = channel.newBuilder(); //NOSONAR
|
||||
BytesRestResponse response = null;
|
||||
|
||||
try {
|
||||
|
||||
final User user = (User) threadPool.getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_USER);
|
||||
final TransportAddress remoteAddress = (TransportAddress) threadPool.getThreadContext()
|
||||
.getTransient(ConfigConstants.OPENDISTRO_SECURITY_REMOTE_ADDRESS);
|
||||
Set<String> userRoles = privilegesEvaluator.mapSecurityRoles(user, remoteAddress);
|
||||
Boolean hasApiAccess = restApiPrivilegesEvaluator.currentUserHasRestApiAccess(userRoles);
|
||||
Map<Endpoint, List<Method>> disabledEndpoints = restApiPrivilegesEvaluator.getDisabledEndpointsForCurrentUser(user.getName(), userRoles);
|
||||
|
||||
builder.startObject();
|
||||
builder.field("user", user==null?null:user.toString());
|
||||
builder.field("user_name", user==null?null:user.getName()); //NOSONAR
|
||||
builder.field("has_api_access", hasApiAccess);
|
||||
builder.startObject("disabled_endpoints");
|
||||
for(Entry<Endpoint, List<Method>> entry : disabledEndpoints.entrySet()) {
|
||||
builder.field(entry.getKey().name(), entry.getValue());
|
||||
}
|
||||
builder.endObject();
|
||||
builder.endObject();
|
||||
response = new BytesRestResponse(RestStatus.OK, builder);
|
||||
} catch (final Exception e1) {
|
||||
e1.printStackTrace();
|
||||
builder = channel.newBuilder(); //NOSONAR
|
||||
builder.startObject();
|
||||
builder.field("error", e1.toString());
|
||||
builder.endObject();
|
||||
response = new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, builder);
|
||||
} finally {
|
||||
if(builder != null) {
|
||||
builder.close();
|
||||
}
|
||||
}
|
||||
|
||||
channel.sendResponse(response);
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
}
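// A brief sketch (not part of the original commit) of calling the endpoint registered above;
// "restClient" is an assumed, already configured org.elasticsearch.client.RestClient
// (see the FlushCacheExample earlier):
//
//   Response info = restClient.performRequest(new Request("GET", "/_opendistro/_security/api/permissionsinfo"));
//
// The handler answers with a JSON object containing "user", "user_name",
// "has_api_access" and a "disabled_endpoints" object keyed by endpoint name.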
|
@ -0,0 +1,424 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.api;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Path;
|
||||
import java.security.cert.X509Certificate;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.transport.TransportAddress;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.AdminDNs;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.support.Utils;
|
||||
import com.amazon.opendistroforelasticsearch.security.privileges.PrivilegesEvaluator;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.transport.PrincipalExtractor;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.util.SSLRequestHelper;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.util.SSLRequestHelper.SSLInfo;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.User;
|
||||
|
||||
// TODO: Make Singleton?
|
||||
public class RestApiPrivilegesEvaluator {
|
||||
|
||||
protected final Logger logger = LogManager.getLogger(this.getClass());
|
||||
|
||||
private final AdminDNs adminDNs;
|
||||
private final PrivilegesEvaluator privilegesEvaluator;
|
||||
private final PrincipalExtractor principalExtractor;
|
||||
private final Path configPath;
|
||||
private final ThreadPool threadPool;
|
||||
private final Settings settings;
|
||||
|
||||
private final Set<String> allowedRoles = new HashSet<>();
|
||||
|
||||
// endpoints per role, read and cached from settings. Changes here require a
|
||||
// node restart, so it's safe to cache.
|
||||
private final Map<String, Map<Endpoint, List<Method>>> disabledEndpointsForRoles = new HashMap<>();
|
||||
|
||||
// endpoints per user, evaluated and cached dynamically. Changes here
|
||||
// require a node restart, so it's safe to cache.
|
||||
private final Map<String, Map<Endpoint, List<Method>>> disabledEndpointsForUsers = new HashMap<>();
|
||||
|
||||
// globally disabled endpoints and methods, will always be forbidden
|
||||
Map<Endpoint, List<Method>> globallyDisabledEndpoints = new HashMap<>();
|
||||
|
||||
// all endpoints and methods, will be returned for users that do not have any access at all
|
||||
Map<Endpoint, List<Method>> allEndpoints = new HashMap<>();
|
||||
|
||||
private final Boolean roleBasedAccessEnabled;
|
||||
|
||||
public RestApiPrivilegesEvaluator(Settings settings, AdminDNs adminDNs, PrivilegesEvaluator privilegesEvaluator, PrincipalExtractor principalExtractor, Path configPath,
|
||||
ThreadPool threadPool) {
|
||||
|
||||
this.adminDNs = adminDNs;
|
||||
this.privilegesEvaluator = privilegesEvaluator;
|
||||
this.principalExtractor = principalExtractor;
|
||||
this.configPath = configPath;
|
||||
this.threadPool = threadPool;
|
||||
this.settings = settings;
|
||||
|
||||
// set up
|
||||
|
||||
// all endpoints and methods
|
||||
Map<Endpoint, List<Method>> allEndpoints = new HashMap<>();
|
||||
for(Endpoint endpoint : Endpoint.values()) {
|
||||
List<Method> allMethods = new LinkedList<>();
|
||||
allMethods.addAll(Arrays.asList(Method.values()));
|
||||
allEndpoints.put(endpoint, allMethods);
|
||||
}
|
||||
this.allEndpoints = Collections.unmodifiableMap(allEndpoints);
|
||||
|
||||
// setup role based permissions
|
||||
allowedRoles.addAll(settings.getAsList(ConfigConstants.OPENDISTRO_SECURITY_RESTAPI_ROLES_ENABLED));
|
||||
|
||||
this.roleBasedAccessEnabled = !allowedRoles.isEmpty();
|
||||
|
||||
// globally disabled endpoints, disables access to Endpoint/Method combination for all roles
|
||||
Settings globalSettings = settings.getAsSettings(ConfigConstants.OPENDISTRO_SECURITY_RESTAPI_ENDPOINTS_DISABLED + ".global");
|
||||
if (!globalSettings.isEmpty()) {
|
||||
globallyDisabledEndpoints = parseDisabledEndpoints(globalSettings);
|
||||
}
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("Globally disabled endpoints: {}", globallyDisabledEndpoints);
|
||||
}
|
||||
|
||||
for (String role : allowedRoles) {
|
||||
Settings settingsForRole = settings.getAsSettings(ConfigConstants.OPENDISTRO_SECURITY_RESTAPI_ENDPOINTS_DISABLED + "." + role);
|
||||
if (settingsForRole.isEmpty()) {
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("No disabled endpoints/methods for permitted role {} found, allowing all", role);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
Map<Endpoint, List<Method>> disabledEndpointsForRole = parseDisabledEndpoints(settingsForRole);
|
||||
if (!disabledEndpointsForRole.isEmpty()) {
|
||||
disabledEndpointsForRoles.put(role, disabledEndpointsForRole);
|
||||
} else {
|
||||
logger.warn("Disabled endpoints/methods empty for role {}, please check configuration", role);
|
||||
}
|
||||
}
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("Parsed permission set for endpoints: {}", disabledEndpointsForRoles);
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings({ "rawtypes" })
|
||||
private Map<Endpoint, List<Method>> parseDisabledEndpoints(Settings settings) {
|
||||
|
||||
// Expects Setting like: 'ACTIONGROUPS=["GET", "POST"]'
|
||||
if (settings == null || settings.isEmpty()) {
|
||||
logger.error("Settings for disabled endpoint is null or empty: '{}', skipping.", settings);
|
||||
return Collections.emptyMap();
|
||||
}
|
||||
|
||||
final Map<Endpoint, List<Method>> disabledEndpoints = new HashMap<Endpoint, List<Method>>();
|
||||
|
||||
Map<String, Object> disabledEndpointsSettings = Utils.convertJsonToxToStructuredMap(settings);
|
||||
|
||||
for (Entry<String, Object> value : disabledEndpointsSettings.entrySet()) {
|
||||
// key is the endpoint, see if it is a valid one
|
||||
String endpointString = value.getKey().toUpperCase();
|
||||
Endpoint endpoint = null;
|
||||
try {
|
||||
endpoint = Endpoint.valueOf(endpointString);
|
||||
} catch (Exception e) {
|
||||
logger.error("Unknown endpoint '{}' found in configuration, skipping.", endpointString);
|
||||
continue;
|
||||
}
|
||||
// value must be non null
|
||||
if (value.getValue() == null) {
|
||||
logger.error("Disabled HTTP methods of endpoint '{}' is null, skipping.", endpointString);
|
||||
continue;
|
||||
}
|
||||
|
||||
// value must be an array of methods
|
||||
if (!(value.getValue() instanceof Collection)) {
|
||||
logger.error("Disabled HTTP methods of endpoint '{}' must be an array, actually is '{}', skipping.", endpointString, (value.getValue().toString()));
|
||||
continue;
}
|
||||
List<Method> disabledMethods = new LinkedList<>();
|
||||
for (Object disabledMethodObj : (Collection) value.getValue()) {
|
||||
if (disabledMethodObj == null) {
|
||||
logger.error("Found null value in disabled HTTP methods of endpoint '{}', skipping.", endpointString);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!(disabledMethodObj instanceof String)) {
|
||||
logger.error("Found non-String value in disabled HTTP methods of endpoint '{}', skipping.", endpointString);
|
||||
continue;
|
||||
}
|
||||
|
||||
String disabledMethodAsString = (String) disabledMethodObj;
|
||||
|
||||
// Provide support for '*', means all methods
|
||||
if (disabledMethodAsString.trim().equals("*")) {
|
||||
disabledMethods.addAll(Arrays.asList(Method.values()));
|
||||
break;
|
||||
}
|
||||
// no wild card, disabled method must be one of
|
||||
// RestRequest.Method
|
||||
Method disabledMethod = null;
|
||||
try {
|
||||
disabledMethod = Method.valueOf(disabledMethodAsString.toUpperCase());
|
||||
} catch (Exception e) {
|
||||
logger.error("Invalid HTTP method '{}' found in disabled HTTP methods of endpoint '{}', skipping.", disabledMethodAsString.toUpperCase(), endpointString);
|
||||
continue;
|
||||
}
|
||||
disabledMethods.add(disabledMethod);
|
||||
}
|
||||
|
||||
disabledEndpoints.put(endpoint, disabledMethods);
|
||||
|
||||
}
|
||||
return disabledEndpoints;
|
||||
}
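// A minimal sketch (not part of the original commit) of settings in the shape
// parseDisabledEndpoints() expects; the role name "admin_role" and the chosen
// endpoint/method combinations are purely illustrative.
static Settings exampleRestApiSettings() {
    return Settings.builder()
            // roles that may use the REST management API at all
            .putList(ConfigConstants.OPENDISTRO_SECURITY_RESTAPI_ROLES_ENABLED, "admin_role")
            // admin_role may use every endpoint except PUT/DELETE on ACTIONGROUPS
            .putList(ConfigConstants.OPENDISTRO_SECURITY_RESTAPI_ENDPOINTS_DISABLED + ".admin_role.ACTIONGROUPS", "PUT", "DELETE")
            // the CACHE endpoint is switched off for everybody
            .putList(ConfigConstants.OPENDISTRO_SECURITY_RESTAPI_ENDPOINTS_DISABLED + ".global.CACHE", "*")
            .build();
}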
|
||||
|
||||
/**
|
||||
* Check if the current request is allowed to use the REST API and the
|
||||
* requested end point. Using an admin certificate grants all permissions. A
|
||||
* user/role can have restricted end points.
|
||||
*
|
||||
* @return an error message if user does not have access, null otherwise
|
||||
* TODO: log failed attempt in audit log
|
||||
*/
|
||||
public String checkAccessPermissions(RestRequest request, Endpoint endpoint) throws IOException {
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("Checking admin access for endpoint {}, path {} and method {}", endpoint.name(), request.path(), request.method().name());
|
||||
}
|
||||
|
||||
String roleBasedAccessFailureReason = checkRoleBasedAccessPermissions(request, endpoint);
|
||||
// Role based access granted
|
||||
if (roleBasedAccessFailureReason == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
String certBasedAccessFailureReason = checkAdminCertBasedAccessPermissions(request);
|
||||
// TLS access granted, skip checking roles
|
||||
if (certBasedAccessFailureReason == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
return constructAccessErrorMessage(roleBasedAccessFailureReason, certBasedAccessFailureReason);
|
||||
}
|
||||
|
||||
public Boolean currentUserHasRestApiAccess(Set<String> userRoles) {
|
||||
|
||||
// check if user has any role that grants access
|
||||
return !Collections.disjoint(allowedRoles, userRoles);
|
||||
|
||||
}
|
||||
|
||||
public Map<Endpoint, List<Method>> getDisabledEndpointsForCurrentUser(String userPrincipal, Set<String> userRoles) {
|
||||
|
||||
// cache
|
||||
if (disabledEndpointsForUsers.containsKey(userPrincipal)) {
|
||||
return disabledEndpointsForUsers.get(userPrincipal);
|
||||
}
|
||||
|
||||
if (!currentUserHasRestApiAccess(userRoles)) {
|
||||
return this.allEndpoints;
|
||||
}
|
||||
|
||||
// will contain the final list of disabled endpoints and methods
|
||||
Map<Endpoint, List<Method>> finalEndpoints = new HashMap<>();
|
||||
|
||||
// List of all disabled endpoints for user. Disabled endpoints must be configured in all
|
||||
// roles to take effect. If a role contains a disabled endpoint, but another role
|
||||
// allows this endpoint (i.e. not contained in the disabled endpoints for this role),
|
||||
// the access is allowed.
|
||||
|
||||
// make list mutable
|
||||
List<Endpoint> remainingEndpoints = new LinkedList<>(Arrays.asList(Endpoint.values()));
|
||||
|
||||
// only retain endpoints contained in all roles for user
|
||||
boolean hasDisabledEndpoints = false;
|
||||
for (String userRole : userRoles) {
|
||||
Map<Endpoint, List<Method>> endpointsForRole = disabledEndpointsForRoles.get(userRole);
|
||||
if (endpointsForRole == null || endpointsForRole.isEmpty()) {
|
||||
continue;
|
||||
}
|
||||
Set<Endpoint> disabledEndpoints = endpointsForRole.keySet();
|
||||
remainingEndpoints.retainAll(disabledEndpoints);
|
||||
hasDisabledEndpoints = true;
|
||||
}
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("Remaining endpoints for user {} after retaining all : {}", userPrincipal, remainingEndpoints);
|
||||
}
|
||||
|
||||
// if user does not have any disabled endpoints, only globally disabled endpoints apply
|
||||
if (!hasDisabledEndpoints) {
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("No disabled endpoints for user {} at all, only globally disabledendpoints apply.", userPrincipal, remainingEndpoints);
|
||||
}
|
||||
disabledEndpointsForUsers.put(userPrincipal, addGloballyDisabledEndpoints(finalEndpoints));
|
||||
return finalEndpoints;
|
||||
|
||||
}
|
||||
|
||||
// one or more disabled remaining endpoints, keep only
|
||||
// methods contained in all roles for each endpoint
|
||||
for (Endpoint endpoint : remainingEndpoints) {
|
||||
// make list mutable
|
||||
List<Method> remainingMethodsForEndpoint = new LinkedList<>(Arrays.asList(Method.values()));
|
||||
for (String userRole : userRoles) {
|
||||
Map<Endpoint, List<Method>> endpoints = disabledEndpointsForRoles.get(userRole);
|
||||
if (endpoints != null && !endpoints.isEmpty()) {
|
||||
remainingMethodsForEndpoint.retainAll(endpoints.get(endpoint));
|
||||
}
|
||||
}
|
||||
|
||||
finalEndpoints.put(endpoint, remainingMethodsForEndpoint);
|
||||
}
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("Disabled endpoints for user {} after retaining all : {}", userPrincipal, finalEndpoints);
|
||||
}
|
||||
|
||||
// add globally disabled endpoints and methods, will always be disabled
|
||||
addGloballyDisabledEndpoints(finalEndpoints);
|
||||
disabledEndpointsForUsers.put(userPrincipal, finalEndpoints);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("Disabled endpoints for user {} after retaining all : {}", disabledEndpointsForUsers.get(userPrincipal));
|
||||
}
|
||||
|
||||
return disabledEndpointsForUsers.get(userPrincipal);
|
||||
}
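// Worked example (not part of the original commit, role names hypothetical): if role_a
// disables ACTIONGROUPS [GET, PUT] and role_b disables ACTIONGROUPS [GET] plus CACHE [*],
// a user holding both roles keeps only ACTIONGROUPS [GET] disabled, because an
// endpoint/method must be disabled in every one of the user's roles to remain disabled;
// globally disabled endpoints and methods are then added on top regardless of roles.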
|
||||
|
||||
private Map<Endpoint, List<Method>> addGloballyDisabledEndpoints(Map<Endpoint, List<Method>> endpoints) {
|
||||
if(globallyDisabledEndpoints != null && !globallyDisabledEndpoints.isEmpty()) {
|
||||
Set<Endpoint> globalEndoints = globallyDisabledEndpoints.keySet();
|
||||
for(Endpoint endpoint : globalEndoints) {
|
||||
endpoints.putIfAbsent(endpoint, new LinkedList<>());
|
||||
endpoints.get(endpoint).addAll(globallyDisabledEndpoints.get(endpoint));
|
||||
}
|
||||
}
|
||||
return endpoints;
|
||||
}
|
||||
|
||||
private String checkRoleBasedAccessPermissions(RestRequest request, Endpoint endpoint) {
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("Checking role based admin access for endpoint {} and method {}", endpoint.name(), request.method().name());
|
||||
}
|
||||
// Role based access. Check that user has role suitable for admin access
|
||||
// and that the role has also access to this endpoint.
|
||||
if (this.roleBasedAccessEnabled) {
|
||||
|
||||
// get current user and roles
|
||||
final User user = (User) threadPool.getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_USER);
|
||||
final TransportAddress remoteAddress = (TransportAddress) threadPool.getThreadContext().getTransient(ConfigConstants.OPENDISTRO_SECURITY_REMOTE_ADDRESS);
|
||||
|
||||
// map the users Security roles
|
||||
Set<String> userRoles = privilegesEvaluator.mapSecurityRoles(user, remoteAddress);
|
||||
|
||||
// check if user has any role that grants access
|
||||
if (currentUserHasRestApiAccess(userRoles)) {
|
||||
// yes, calculate disabled endpoints. Since a user can have
|
||||
// multiple roles, the endpoint
|
||||
// needs to be disabled in all roles.
|
||||
|
||||
Map<Endpoint, List<Method>> disabledEndpointsForUser = getDisabledEndpointsForCurrentUser(user.getName(), userRoles);
|
||||
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("Disabled endpoints for user {} : {} ", user, disabledEndpointsForUser);
|
||||
}
|
||||
|
||||
// check if we have any disabled methods for this endpoint
|
||||
List<Method> disabledMethodsForEndpoint = disabledEndpointsForUser.get(endpoint);
|
||||
|
||||
// no settings, all methods for this endpoint allowed
|
||||
if (disabledMethodsForEndpoint == null || disabledMethodsForEndpoint.isEmpty()) {
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("No disabled methods for user {} and endpoint {}, access allowed ", user, endpoint);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// some methods disabled, check requested method
|
||||
if (!disabledMethodsForEndpoint.contains(request.method())) {
|
||||
if (logger.isDebugEnabled()) {
|
||||
logger.debug("Request method {} for user {} and endpoint {} not restricted, access allowed ", request.method(), user, endpoint);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
logger.info("User {} with Open Distro Security Roles {} does not have access to endpoint {} and method {}, checking admin TLS certificate now.", user, userRoles,
|
||||
endpoint.name(), request.method());
|
||||
return "User " + user.getName() + " with Open Distro Security Roles " + userRoles + " does not have any access to endpoint " + endpoint.name() + " and method "
|
||||
+ request.method().name();
|
||||
} else {
|
||||
// no, but maybe the request contains a client certificate.
|
||||
// Remember error reason for better response message later on.
|
||||
logger.info("User {} with Open Distro Security roles {} does not have any role privileged for admin access.", user, userRoles);
|
||||
return "User " + user.getName() + " with Open Distro Security Roles " + userRoles + " does not have any role privileged for admin access";
|
||||
}
|
||||
}
|
||||
return "Role based access not enabled.";
|
||||
}
|
||||
|
||||
private String checkAdminCertBasedAccessPermissions(RestRequest request) throws IOException {
|
||||
if (logger.isTraceEnabled()) {
|
||||
logger.trace("Checking certificate based admin access for path {} and method {}", request.path(), request.method().name());
|
||||
}
|
||||
|
||||
// Certificate based access: check if we have an admin TLS certificate
|
||||
SSLInfo sslInfo = SSLRequestHelper.getSSLInfo(settings, configPath, request, principalExtractor);
|
||||
|
||||
if (sslInfo == null) {
|
||||
// authentication finally failed here, so log a warning
|
||||
logger.warn("No ssl info found in request.");
|
||||
return "No ssl info found in request.";
|
||||
}
|
||||
|
||||
X509Certificate[] certs = sslInfo.getX509Certs();
|
||||
|
||||
if (certs == null || certs.length == 0) {
|
||||
logger.warn("No client TLS certificate found in request");
|
||||
return "No client TLS certificate found in request";
|
||||
}
|
||||
|
||||
if (!adminDNs.isAdminDN(sslInfo.getPrincipal())) {
|
||||
logger.warn("Security admin permissions required but {} is not an admin", sslInfo.getPrincipal());
|
||||
return "Security admin permissions required but " + sslInfo.getPrincipal() + " is not an admin";
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private String constructAccessErrorMessage(String roleBasedAccessFailure, String certBasedAccessFailure) {
|
||||
return roleBasedAccessFailure + ". " + certBasedAccessFailure;
|
||||
}
|
||||
|
||||
}
|
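Editor's note: the following is an illustration only, not part of this commit. It sketches the intersection logic used by getDisabledEndpointsForCurrentUser above, assuming the Endpoint and Method enums and the imports of the class: an endpoint (or a method on it) stays disabled only if every one of the user's roles disables it.

        // Hypothetical role configurations (assumed for illustration only)
        Map<Endpoint, List<Method>> roleA = new HashMap<>();
        roleA.put(Endpoint.ROLES, Arrays.asList(Method.PUT, Method.DELETE));
        Map<Endpoint, List<Method>> roleB = new HashMap<>();
        roleB.put(Endpoint.ROLES, Arrays.asList(Method.DELETE));

        // Step 1: keep only endpoints disabled in every role
        List<Endpoint> remaining = new LinkedList<>(Arrays.asList(Endpoint.values()));
        remaining.retainAll(roleA.keySet());
        remaining.retainAll(roleB.keySet()); // -> [ROLES]

        // Step 2: per endpoint, keep only methods disabled in every role
        List<Method> methods = new LinkedList<>(Arrays.asList(Method.values()));
        methods.retainAll(roleA.get(Endpoint.ROLES));
        methods.retainAll(roleB.get(Endpoint.ROLES)); // -> [DELETE], so only DELETE on the roles API stays blocked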
@ -0,0 +1,74 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.api;
|
||||
|
||||
import java.nio.file.Path;
|
||||
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.AdminDNs;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.IndexBaseConfigurationRepository;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.AbstractConfigurationValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.RolesValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.privileges.PrivilegesEvaluator;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.transport.PrincipalExtractor;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
|
||||
public class RolesApiAction extends PatchableResourceApiAction {
|
||||
|
||||
@Inject
|
||||
public RolesApiAction(Settings settings, final Path configPath, RestController controller, Client client, AdminDNs adminDNs, IndexBaseConfigurationRepository cl,
|
||||
ClusterService cs, final PrincipalExtractor principalExtractor, final PrivilegesEvaluator evaluator, ThreadPool threadPool, AuditLog auditLog) {
|
||||
super(settings, configPath, controller, client, adminDNs, cl, cs, principalExtractor, evaluator, threadPool, auditLog);
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/roles/", this);
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/roles/{name}", this);
|
||||
controller.registerHandler(Method.DELETE, "/_opendistro/_security/api/roles/{name}", this);
|
||||
controller.registerHandler(Method.PUT, "/_opendistro/_security/api/roles/{name}", this);
|
||||
controller.registerHandler(Method.PATCH, "/_opendistro/_security/api/roles/", this);
|
||||
controller.registerHandler(Method.PATCH, "/_opendistro/_security/api/roles/{name}", this);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Endpoint getEndpoint() {
|
||||
return Endpoint.ROLES;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AbstractConfigurationValidator getValidator(RestRequest request, BytesReference ref, Object... param) {
|
||||
return new RolesValidator(request, ref, this.settings, param);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getResourceName() {
|
||||
return "role";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getConfigName() {
|
||||
return ConfigConstants.CONFIGNAME_ROLES;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,76 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.api;
|
||||
|
||||
import java.nio.file.Path;
|
||||
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.cluster.service.ClusterService;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.auditlog.AuditLog;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.AdminDNs;
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.IndexBaseConfigurationRepository;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.AbstractConfigurationValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.dlic.rest.validation.RolesMappingValidator;
|
||||
import com.amazon.opendistroforelasticsearch.security.privileges.PrivilegesEvaluator;
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.transport.PrincipalExtractor;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
|
||||
public class RolesMappingApiAction extends PatchableResourceApiAction {
|
||||
|
||||
@Inject
|
||||
public RolesMappingApiAction(final Settings settings, final Path configPath, final RestController controller, final Client client,
|
||||
final AdminDNs adminDNs, final IndexBaseConfigurationRepository cl, final ClusterService cs,
|
||||
final PrincipalExtractor principalExtractor, final PrivilegesEvaluator evaluator, ThreadPool threadPool, AuditLog auditLog) {
|
||||
super(settings, configPath, controller, client, adminDNs, cl, cs, principalExtractor, evaluator, threadPool, auditLog);
|
||||
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/rolesmapping/", this);
|
||||
controller.registerHandler(Method.GET, "/_opendistro/_security/api/rolesmapping/{name}", this);
|
||||
controller.registerHandler(Method.DELETE, "/_opendistro/_security/api/rolesmapping/{name}", this);
|
||||
controller.registerHandler(Method.PUT, "/_opendistro/_security/api/rolesmapping/{name}", this);
|
||||
controller.registerHandler(Method.PATCH, "/_opendistro/_security/api/rolesmapping/", this);
|
||||
controller.registerHandler(Method.PATCH, "/_opendistro/_security/api/rolesmapping/{name}", this);
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Endpoint getEndpoint() {
|
||||
return Endpoint.ROLESMAPPING;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected AbstractConfigurationValidator getValidator(RestRequest request, BytesReference ref, Object... param) {
|
||||
return new RolesMappingValidator(request, ref, this.settings, param);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getResourceName() {
|
||||
return "rolesmapping";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getConfigName() {
|
||||
return ConfigConstants.CONFIGNAME_ROLES_MAPPING;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,83 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.support;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.DefaultObjectMapper;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.OpenDistroSecurityDeprecationHandler;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
|
||||
public class Utils {
|
||||
|
||||
public static Map<String, Object> convertJsonToxToStructuredMap(ToXContent jsonContent) {
|
||||
Map<String, Object> map = null;
|
||||
try {
|
||||
final BytesReference bytes = XContentHelper.toXContent(jsonContent, XContentType.JSON, false);
|
||||
map = XContentHelper.convertToMap(bytes, false, XContentType.JSON).v2();
|
||||
} catch (IOException e1) {
|
||||
throw ExceptionsHelper.convertToElastic(e1);
|
||||
}
|
||||
|
||||
return map;
|
||||
}
|
||||
|
||||
public static Map<String, Object> convertJsonToxToStructuredMap(String jsonContent) {
|
||||
try (XContentParser parser = XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY, OpenDistroSecurityDeprecationHandler.INSTANCE, jsonContent)) {
|
||||
return parser.map();
|
||||
} catch (IOException e1) {
|
||||
throw ExceptionsHelper.convertToElastic(e1);
|
||||
}
|
||||
}
|
||||
|
||||
public static BytesReference convertStructuredMapToBytes(Map<String, Object> structuredMap) {
|
||||
try {
|
||||
return BytesReference.bytes(JsonXContent.contentBuilder().map(structuredMap));
|
||||
} catch (IOException e) {
|
||||
throw new ElasticsearchParseException("Failed to convert map", e);
|
||||
}
|
||||
}
|
||||
|
||||
public static String convertStructuredMapToJson(Map<String, Object> structuredMap) {
|
||||
try {
|
||||
return XContentHelper.convertToJson(convertStructuredMapToBytes(structuredMap), false, XContentType.JSON);
|
||||
} catch (IOException e) {
|
||||
throw new ElasticsearchParseException("Failed to convert map", e);
|
||||
}
|
||||
}
|
||||
|
||||
public static JsonNode convertJsonToJackson(ToXContent jsonContent) {
|
||||
try {
|
||||
final BytesReference bytes = XContentHelper.toXContent(jsonContent, XContentType.JSON, false);
|
||||
return DefaultObjectMapper.objectMapper.readTree(bytes.utf8ToString());
|
||||
} catch (IOException e1) {
|
||||
throw ExceptionsHelper.convertToElastic(e1);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
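Editor's note: illustration only, not part of this commit. A minimal sketch of how the helpers above round-trip configuration content between JSON, a structured map, and bytes; the sample payload is an assumption.

        // Parse a JSON string into a structured map, then convert it back
        Map<String, Object> asMap = Utils.convertJsonToxToStructuredMap("{\"cluster\":[\"CLUSTER_ALL\"]}");
        BytesReference asBytes = Utils.convertStructuredMapToBytes(asMap);
        String asJson = Utils.convertStructuredMapToJson(asMap); // back to a JSON string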
@ -0,0 +1,296 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.validation;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
import com.fasterxml.jackson.core.JsonFactory;
|
||||
import com.fasterxml.jackson.core.JsonParser;
|
||||
import com.fasterxml.jackson.core.JsonToken;
|
||||
import com.google.common.base.Joiner;
|
||||
|
||||
public abstract class AbstractConfigurationValidator {
|
||||
|
||||
JsonFactory factory = new JsonFactory();
|
||||
|
||||
/* public for testing */
|
||||
public final static String INVALID_KEYS_KEY = "invalid_keys";
|
||||
|
||||
/* public for testing */
|
||||
public final static String MISSING_MANDATORY_KEYS_KEY = "missing_mandatory_keys";
|
||||
|
||||
/* public for testing */
|
||||
public final static String MISSING_MANDATORY_OR_KEYS_KEY = "specify_one_of";
|
||||
|
||||
protected final Logger log = LogManager.getLogger(this.getClass());
|
||||
|
||||
/** Define the various keys for this validator */
|
||||
protected final Map<String, DataType> allowedKeys = new HashMap<>();
|
||||
|
||||
protected final Set<String> mandatoryKeys = new HashSet<>();
|
||||
|
||||
protected final Set<String> mandatoryOrKeys = new HashSet<>();
|
||||
|
||||
protected final Map<String, String> wrongDatatypes = new HashMap<>();
|
||||
|
||||
/** Contains erroneous keys */
|
||||
protected final Set<String> missingMandatoryKeys = new HashSet<>();
|
||||
|
||||
protected final Set<String> invalidKeys = new HashSet<>();
|
||||
|
||||
protected final Set<String> missingMandatoryOrKeys = new HashSet<>();
|
||||
|
||||
/** The error type */
|
||||
protected ErrorType errorType = ErrorType.NONE;
|
||||
|
||||
/** Behaviour regarding payload */
|
||||
protected boolean payloadMandatory = false;
|
||||
|
||||
protected boolean payloadAllowed = true;
|
||||
|
||||
private Settings.Builder settingsBuilder;
|
||||
|
||||
protected final Method method;
|
||||
|
||||
protected final BytesReference content;
|
||||
|
||||
protected final Settings esSettings;
|
||||
|
||||
protected final RestRequest request;
|
||||
|
||||
protected final Object[] param;
|
||||
|
||||
public AbstractConfigurationValidator(final RestRequest request, final BytesReference ref, final Settings esSettings, Object... param) {
|
||||
this.content = ref;
|
||||
this.method = request.method();
|
||||
this.esSettings = esSettings;
|
||||
this.request = request;
|
||||
this.param = param;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @return false if validation fails
|
||||
*/
|
||||
public boolean validateSettings() {
|
||||
// no payload for DELETE and GET requests
|
||||
if (method.equals(Method.DELETE) || method.equals(Method.GET)) {
|
||||
return true;
|
||||
}
|
||||
// try to parse payload
|
||||
try {
|
||||
this.settingsBuilder = toSettingsBuilder(content);
|
||||
} catch (ElasticsearchException e) {
|
||||
this.errorType = ErrorType.BODY_NOT_PARSEABLE;
|
||||
return false;
|
||||
}
|
||||
|
||||
Settings settings = settingsBuilder.build();
|
||||
|
||||
Set<String> requested = new HashSet<String>(settings.names());
|
||||
// check if payload is accepted at all
|
||||
if (!this.payloadAllowed && !requested.isEmpty()) {
|
||||
this.errorType = ErrorType.PAYLOAD_NOT_ALLOWED;
|
||||
return false;
|
||||
}
|
||||
// check if payload is mandatory
|
||||
if (this.payloadMandatory && requested.isEmpty()) {
|
||||
this.errorType = ErrorType.PAYLOAD_MANDATORY;
|
||||
return false;
|
||||
}
|
||||
|
||||
// mandatory settings, one of ...
|
||||
if (Collections.disjoint(requested, mandatoryOrKeys)) {
|
||||
this.missingMandatoryOrKeys.addAll(mandatoryOrKeys);
|
||||
}
|
||||
|
||||
// mandatory settings
|
||||
Set<String> mandatory = new HashSet<>(mandatoryKeys);
|
||||
mandatory.removeAll(requested);
|
||||
missingMandatoryKeys.addAll(mandatory);
|
||||
|
||||
// invalid settings
|
||||
Set<String> allowed = new HashSet<>(allowedKeys.keySet());
|
||||
requested.removeAll(allowed);
|
||||
this.invalidKeys.addAll(requested);
|
||||
boolean valid = missingMandatoryKeys.isEmpty() && invalidKeys.isEmpty() && missingMandatoryOrKeys.isEmpty();
|
||||
if (!valid) {
|
||||
this.errorType = ErrorType.INVALID_CONFIGURATION;
|
||||
}
|
||||
|
||||
// check types
|
||||
try {
|
||||
if (!checkDatatypes()) {
|
||||
this.errorType = ErrorType.WRONG_DATATYPE;
|
||||
return false;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
this.errorType = ErrorType.BODY_NOT_PARSEABLE;
|
||||
return false;
|
||||
}
|
||||
|
||||
return valid;
|
||||
}
|
||||
|
||||
private boolean checkDatatypes() throws Exception {
|
||||
String contentAsJson = XContentHelper.convertToJson(content, false, XContentType.YAML);
|
||||
try(JsonParser parser = factory.createParser(contentAsJson)) {
|
||||
JsonToken token = null;
|
||||
while ((token = parser.nextToken()) != null) {
|
||||
if(token.equals(JsonToken.FIELD_NAME)) {
|
||||
String currentName = parser.getCurrentName();
|
||||
DataType dataType = allowedKeys.get(currentName);
|
||||
if(dataType != null) {
|
||||
JsonToken valueToken = parser.nextToken();
|
||||
switch (dataType) {
|
||||
case STRING:
|
||||
if(!valueToken.equals(JsonToken.VALUE_STRING)) {
|
||||
wrongDatatypes.put(currentName, "String expected");
|
||||
}
|
||||
break;
|
||||
case ARRAY:
|
||||
if(!valueToken.equals(JsonToken.START_ARRAY) && !valueToken.equals(JsonToken.END_ARRAY)) {
|
||||
wrongDatatypes.put(currentName, "Array expected");
|
||||
}
|
||||
break;
|
||||
case OBJECT:
|
||||
if(!valueToken.equals(JsonToken.START_OBJECT) && !valueToken.equals(JsonToken.END_OBJECT)) {
|
||||
wrongDatatypes.put(currentName, "Object expected");
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return wrongDatatypes.isEmpty();
|
||||
}
|
||||
}
|
||||
|
||||
public XContentBuilder errorsAsXContent() {
|
||||
try {
|
||||
final XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.startObject();
|
||||
switch (this.errorType) {
|
||||
case NONE:
|
||||
builder.field("status", "error");
|
||||
builder.field("reason", errorType.getMessage());
|
||||
break;
|
||||
case INVALID_CONFIGURATION:
|
||||
builder.field("status", "error");
|
||||
builder.field("reason", ErrorType.INVALID_CONFIGURATION.getMessage());
|
||||
addErrorMessage(builder, INVALID_KEYS_KEY, invalidKeys);
|
||||
addErrorMessage(builder, MISSING_MANDATORY_KEYS_KEY, missingMandatoryKeys);
|
||||
addErrorMessage(builder, MISSING_MANDATORY_OR_KEYS_KEY, missingMandatoryOrKeys);
|
||||
break;
|
||||
case INVALID_PASSWORD:
|
||||
builder.field("status", "error");
|
||||
builder.field("reason", esSettings.get(ConfigConstants.OPENDISTRO_SECURITY_RESTAPI_PASSWORD_VALIDATION_ERROR_MESSAGE,"Password does not match minimum criterias"));
|
||||
break;
|
||||
case WRONG_DATATYPE:
|
||||
builder.field("status", "error");
|
||||
builder.field("reason", ErrorType.WRONG_DATATYPE.getMessage());
|
||||
for (Entry<String, String> entry : wrongDatatypes.entrySet()) {
|
||||
builder.field( entry.getKey(), entry.getValue());
|
||||
}
|
||||
break;
|
||||
default:
|
||||
builder.field("status", "error");
|
||||
builder.field("reason", errorType.getMessage());
|
||||
}
|
||||
builder.endObject();
|
||||
return builder;
|
||||
} catch (IOException ex) {
|
||||
log.error("Cannot build error settings", ex);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public Settings.Builder settingsBuilder() {
|
||||
return settingsBuilder;
|
||||
}
|
||||
|
||||
private void addErrorMessage(final XContentBuilder builder, final String message, final Set<String> keys)
|
||||
throws IOException {
|
||||
if (!keys.isEmpty()) {
|
||||
builder.startObject(message);
|
||||
builder.field("keys", Joiner.on(",").join(keys.toArray(new String[0])));
|
||||
builder.endObject();
|
||||
}
|
||||
}
|
||||
|
||||
private Settings.Builder toSettingsBuilder(final BytesReference ref) {
|
||||
if (ref == null || ref.length() == 0) {
|
||||
return Settings.builder();
|
||||
}
|
||||
|
||||
try {
|
||||
return Settings.builder().loadFromSource(ref.utf8ToString(), XContentType.JSON);
|
||||
} catch (final Exception e) {
|
||||
throw ExceptionsHelper.convertToElastic(e);
|
||||
}
|
||||
}
|
||||
|
||||
public static enum DataType {
|
||||
STRING,
|
||||
ARRAY,
|
||||
OBJECT;
|
||||
}
|
||||
|
||||
public static enum ErrorType {
|
||||
NONE("ok"),
|
||||
INVALID_CONFIGURATION("Invalid configuration"),
|
||||
INVALID_PASSWORD("Invalid password"),
|
||||
WRONG_DATATYPE("Wrong datatype"),
|
||||
BODY_NOT_PARSEABLE("Could not parse content of request."),
|
||||
PAYLOAD_NOT_ALLOWED("Request body not allowed for this action."),
|
||||
PAYLOAD_MANDATORY("Request body required for this action."),
|
||||
OPENDISTRO_SECURITY_NOT_INITIALIZED("Open Distro Security index not initialized.");
|
||||
|
||||
private String message;
|
||||
|
||||
private ErrorType(String message) {
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
}
|
||||
|
||||
protected final boolean hasParams() {
|
||||
return param != null && param.length>0;
|
||||
}
|
||||
}
|
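Editor's note: illustration only, not part of this commit. A rough sketch of how a REST handler might drive a validator, using the ActionGroupValidator defined below; variable names are assumptions.

        AbstractConfigurationValidator validator = new ActionGroupValidator(request, request.content(), settings);
        if (!validator.validateSettings()) {
            XContentBuilder errors = validator.errorsAsXContent(); // structured error document for the response
            // ... send errors back to the caller
        } else {
            Settings parsed = validator.settingsBuilder().build(); // validated payload as Settings
        }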
@ -0,0 +1,32 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.validation;
|
||||
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
|
||||
public class ActionGroupValidator extends AbstractConfigurationValidator {
|
||||
|
||||
public ActionGroupValidator(final RestRequest request, BytesReference ref, final Settings esSettings, Object... param) {
|
||||
super(request, ref, esSettings, param);
|
||||
this.payloadMandatory = true;
|
||||
allowedKeys.put("permissions", DataType.ARRAY);
|
||||
mandatoryKeys.add("permissions");
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,103 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.validation;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.compress.NotXContentException;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.ssl.util.Utils;
|
||||
import com.amazon.opendistroforelasticsearch.security.support.ConfigConstants;
|
||||
|
||||
public class InternalUsersValidator extends AbstractConfigurationValidator {
|
||||
|
||||
public InternalUsersValidator(final RestRequest request, BytesReference ref, final Settings esSettings,
|
||||
Object... param) {
|
||||
super(request, ref, esSettings, param);
|
||||
this.payloadMandatory = true;
|
||||
allowedKeys.put("hash", DataType.STRING);
|
||||
allowedKeys.put("password", DataType.STRING);
|
||||
allowedKeys.put("roles", DataType.ARRAY);
|
||||
allowedKeys.put("attributes", DataType.OBJECT);
|
||||
allowedKeys.put("username", DataType.STRING);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean validateSettings() {
|
||||
if(!super.validateSettings()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
final String regex = this.esSettings.get(ConfigConstants.OPENDISTRO_SECURITY_RESTAPI_PASSWORD_VALIDATION_REGEX, null);
|
||||
|
||||
if((request.method() == Method.PUT || request.method() == Method.PATCH )
|
||||
&& regex != null
|
||||
&& !regex.isEmpty()
|
||||
&& this.content != null
|
||||
&& this.content.length() > 1) {
|
||||
try {
|
||||
final Map<String, Object> contentAsMap = XContentHelper.convertToMap(this.content, false, XContentType.JSON).v2();
|
||||
if(contentAsMap != null && contentAsMap.containsKey("password")) {
|
||||
final String password = (String) contentAsMap.get("password");
|
||||
|
||||
if(password == null || password.isEmpty()) {
|
||||
if(log.isDebugEnabled()) {
|
||||
log.debug("Unable to validate password because no password is given");
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
if(!regex.isEmpty() && !Pattern.compile("^"+regex+"$").matcher(password).matches()) {
|
||||
if(log.isDebugEnabled()) {
|
||||
log.debug("Regex does not match password");
|
||||
}
|
||||
this.errorType = ErrorType.INVALID_PASSWORD;
|
||||
return false;
|
||||
}
|
||||
|
||||
final String username = Utils.coalesce(request.param("name"), hasParams()?(String)param[0]:null);
|
||||
|
||||
if(username == null || username.isEmpty()) {
|
||||
if(log.isDebugEnabled()) {
|
||||
log.debug("Unable to validate username because no user is given");
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
if(username.toLowerCase().equals(password.toLowerCase())) {
|
||||
if(log.isDebugEnabled()) {
|
||||
log.debug("Username must not match password");
|
||||
}
|
||||
this.errorType = ErrorType.INVALID_PASSWORD;
|
||||
return false;
|
||||
}
|
||||
}
|
||||
} catch (NotXContentException e) {
|
||||
//this.content is not valid json/yaml
|
||||
log.error("Invalid xContent: "+e,e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
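Editor's note: illustration only, not part of this commit. The password validation above is driven by two settings; the regex and message values here are examples, not defaults shipped with the plugin.

        // Example settings enabling password validation (values are assumptions)
        Settings settings = Settings.builder()
                .put(ConfigConstants.OPENDISTRO_SECURITY_RESTAPI_PASSWORD_VALIDATION_REGEX, "(?=.*[A-Z])(?=.*[0-9]).{8,}")
                .put(ConfigConstants.OPENDISTRO_SECURITY_RESTAPI_PASSWORD_VALIDATION_ERROR_MESSAGE,
                        "Password must be at least 8 characters long and contain an upper-case letter and a digit")
                .build();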
@ -0,0 +1,29 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.validation;
|
||||
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
|
||||
public class NoOpValidator extends AbstractConfigurationValidator {
|
||||
|
||||
public NoOpValidator(final RestRequest request, BytesReference ref, final Settings esSettings, Object... param) {
|
||||
super(request, ref, esSettings, param);
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,36 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.validation;
|
||||
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
|
||||
public class RolesMappingValidator extends AbstractConfigurationValidator {
|
||||
|
||||
public RolesMappingValidator(final RestRequest request, final BytesReference ref, final Settings esSettings, Object... param) {
|
||||
super(request, ref, esSettings, param);
|
||||
this.payloadMandatory = true;
|
||||
allowedKeys.put("backendroles", DataType.ARRAY);
|
||||
allowedKeys.put("hosts", DataType.ARRAY);
|
||||
allowedKeys.put("users", DataType.ARRAY);
|
||||
|
||||
mandatoryOrKeys.add("backendroles");
|
||||
mandatoryOrKeys.add("hosts");
|
||||
mandatoryOrKeys.add("users");
|
||||
}
|
||||
}
|
@ -0,0 +1,81 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.dlic.rest.validation;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.configuration.MaskedField;
|
||||
import com.jayway.jsonpath.JsonPath;
|
||||
import com.jayway.jsonpath.ReadContext;
|
||||
|
||||
public class RolesValidator extends AbstractConfigurationValidator {
|
||||
|
||||
public RolesValidator(final RestRequest request, final BytesReference ref, final Settings esSettings, Object... param) {
|
||||
super(request, ref, esSettings, param);
|
||||
this.payloadMandatory = true;
|
||||
allowedKeys.put("indices", DataType.OBJECT);
|
||||
allowedKeys.put("cluster", DataType.ARRAY);
|
||||
allowedKeys.put("tenants", DataType.OBJECT);
|
||||
|
||||
mandatoryOrKeys.add("indices");
|
||||
mandatoryOrKeys.add("cluster");
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean validateSettings() {
|
||||
|
||||
if (!super.validateSettings()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
boolean valid=true;
|
||||
|
||||
if (this.content != null && this.content.length() > 0) {
|
||||
|
||||
final ReadContext ctx = JsonPath.parse(this.content.utf8ToString());
|
||||
final List<String> maskedFields = ctx.read("$.._masked_fields_[*]");
|
||||
|
||||
if (maskedFields != null) {
|
||||
|
||||
for (String mf : maskedFields) {
|
||||
if (!validateMaskedFieldSyntax(mf)) {
|
||||
valid = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(!valid) {
|
||||
this.errorType = ErrorType.WRONG_DATATYPE;
|
||||
}
|
||||
|
||||
return valid;
|
||||
}
|
||||
|
||||
private boolean validateMaskedFieldSyntax(String mf) {
|
||||
try {
|
||||
new MaskedField(mf, new byte[] {1,2,3,4,5,1,2,3,4,5,1,2,3,4,5,6}).isValid();
|
||||
} catch (Exception e) {
|
||||
wrongDatatypes.put("Masked field not valid: "+mf, e.getMessage());
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
@ -0,0 +1,275 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.opendistroforelasticsearch.security.httpclient;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
import java.net.Socket;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.security.KeyManagementException;
|
||||
import java.security.KeyStore;
|
||||
import java.security.KeyStoreException;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.security.UnrecoverableKeyException;
|
||||
import java.security.cert.CertificateException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Base64;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import javax.net.ssl.HostnameVerifier;
|
||||
import javax.net.ssl.SSLContext;
|
||||
|
||||
import org.apache.http.HttpHeaders;
|
||||
import org.apache.http.HttpHost;
|
||||
import org.apache.http.client.config.RequestConfig;
|
||||
import org.apache.http.conn.ssl.DefaultHostnameVerifier;
|
||||
import org.apache.http.conn.ssl.NoopHostnameVerifier;
|
||||
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
|
||||
import org.apache.http.message.BasicHeader;
|
||||
import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy;
|
||||
import org.apache.http.ssl.PrivateKeyDetails;
|
||||
import org.apache.http.ssl.PrivateKeyStrategy;
|
||||
import org.apache.http.ssl.SSLContextBuilder;
|
||||
import org.apache.http.ssl.SSLContexts;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.action.index.IndexResponse;
|
||||
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
|
||||
import org.elasticsearch.client.RequestOptions;
|
||||
import org.elasticsearch.client.RestClient;
|
||||
import org.elasticsearch.client.RestClientBuilder;
|
||||
import org.elasticsearch.client.RestHighLevelClient;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
|
||||
public class HttpClient implements Closeable {
|
||||
|
||||
public static class HttpClientBuilder {
|
||||
|
||||
private KeyStore trustStore;
|
||||
private String basicCredentials;
|
||||
private KeyStore keystore;
|
||||
private String keystoreAlias;
|
||||
private char[] keyPassword;
|
||||
private boolean verifyHostnames;
|
||||
private String[] supportedProtocols = null;
|
||||
private String[] supportedCipherSuites = null;
|
||||
|
||||
private final String[] servers;
|
||||
private boolean ssl;
|
||||
|
||||
private HttpClientBuilder(final String... servers) {
|
||||
super();
|
||||
this.servers = Objects.requireNonNull(servers);
|
||||
if (this.servers.length == 0) {
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
}
|
||||
|
||||
public HttpClientBuilder enableSsl(final KeyStore trustStore, final boolean verifyHostnames) {
|
||||
this.ssl = true;
|
||||
this.trustStore = Objects.requireNonNull(trustStore);
|
||||
this.verifyHostnames = verifyHostnames;
|
||||
return this;
|
||||
}
|
||||
|
||||
public HttpClientBuilder setBasicCredentials(final String username, final String password) {
|
||||
basicCredentials = encodeBasicHeader(Objects.requireNonNull(username), Objects.requireNonNull(password));
|
||||
return this;
|
||||
}
|
||||
|
||||
public HttpClientBuilder setPkiCredentials(final KeyStore keystore, final char[] keyPassword, final String keystoreAlias) {
|
||||
this.keystore = Objects.requireNonNull(keystore);
|
||||
this.keyPassword = keyPassword;
|
||||
this.keystoreAlias = keystoreAlias;
|
||||
return this;
|
||||
}
|
||||
|
||||
public HttpClientBuilder setSupportedProtocols(String[] protocols) {
|
||||
this.supportedProtocols = protocols;
|
||||
return this;
|
||||
}
|
||||
|
||||
public HttpClientBuilder setSupportedCipherSuites(String[] cipherSuites) {
|
||||
this.supportedCipherSuites = cipherSuites;
|
||||
return this;
|
||||
}
|
||||
|
||||
public HttpClient build() throws Exception {
|
||||
return new HttpClient(trustStore, basicCredentials, keystore, keyPassword, keystoreAlias, verifyHostnames, ssl,
|
||||
supportedProtocols, supportedCipherSuites, servers);
|
||||
}
|
||||
|
||||
private static String encodeBasicHeader(final String username, final String password) {
|
||||
return Base64.getEncoder().encodeToString((username + ":" + Objects.requireNonNull(password)).getBytes(StandardCharsets.UTF_8));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static HttpClientBuilder builder(final String... servers) {
|
||||
return new HttpClientBuilder(servers);
|
||||
}
|
||||
|
||||
private final KeyStore trustStore;
|
||||
private final Logger log = LogManager.getLogger(this.getClass());
|
||||
private RestHighLevelClient rclient;
|
||||
private String basicCredentials;
|
||||
private KeyStore keystore;
|
||||
private String keystoreAlias;
|
||||
private char[] keyPassword;
|
||||
private boolean verifyHostnames;
|
||||
private boolean ssl;
|
||||
private String[] supportedProtocols;
|
||||
private String[] supportedCipherSuites;
|
||||
|
||||
private HttpClient(final KeyStore trustStore, final String basicCredentials, final KeyStore keystore,
|
||||
final char[] keyPassword, final String keystoreAlias, final boolean verifyHostnames, final boolean ssl, String[] supportedProtocols, String[] supportedCipherSuites, final String... servers)
|
||||
throws UnrecoverableKeyException, KeyManagementException, NoSuchAlgorithmException, KeyStoreException, CertificateException,
|
||||
IOException {
|
||||
super();
|
||||
this.trustStore = trustStore;
|
||||
this.basicCredentials = basicCredentials;
|
||||
this.keystore = keystore;
|
||||
this.keyPassword = keyPassword;
|
||||
this.verifyHostnames = verifyHostnames;
|
||||
this.ssl = ssl;
|
||||
this.supportedProtocols = supportedProtocols;
|
||||
this.supportedCipherSuites = supportedCipherSuites;
|
||||
this.keystoreAlias = keystoreAlias;
|
||||
|
||||
HttpHost[] hosts = Arrays.stream(servers)
|
||||
.map(s->s.split(":"))
|
||||
.map(s->new HttpHost(s[0], Integer.parseInt(s[1]),ssl?"https":"http"))
|
||||
.collect(Collectors.toList()).toArray(new HttpHost[0]);
|
||||
|
||||
|
||||
RestClientBuilder builder = RestClient.builder(hosts);
|
||||
//builder.setMaxRetryTimeoutMillis(10000);
|
||||
/*builder.setFailureListener(new RestClient.FailureListener() {
|
||||
|
||||
@Override
|
||||
public void onFailure(Node node) {
|
||||
|
||||
}
|
||||
|
||||
});*/
|
||||
|
||||
builder.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() {
|
||||
@Override
|
||||
public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) {
|
||||
try {
|
||||
return asyncClientBuilder(httpClientBuilder);
|
||||
} catch (Exception e) {
|
||||
log.error("Unable to build http client",e);
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
rclient = new RestHighLevelClient(builder);
|
||||
}
|
||||
|
||||
public boolean index(final String content, final String index, final String type, final boolean refresh) {
|
||||
|
||||
try {
|
||||
|
||||
final IndexResponse response = rclient.index(new IndexRequest(index, type)
|
||||
.setRefreshPolicy(refresh?RefreshPolicy.IMMEDIATE:RefreshPolicy.NONE)
|
||||
.source(content, XContentType.JSON), RequestOptions.DEFAULT);
|
||||
|
||||
return response.getShardInfo().getSuccessful() > 0 && response.getShardInfo().getFailed() == 0;
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error(e.toString(),e);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private final HttpAsyncClientBuilder asyncClientBuilder(HttpAsyncClientBuilder httpClientBuilder)
|
||||
throws NoSuchAlgorithmException, KeyStoreException, UnrecoverableKeyException, KeyManagementException {
|
||||
|
||||
// basic auth
|
||||
// pki auth
|
||||
|
||||
if (ssl) {
|
||||
|
||||
final SSLContextBuilder sslContextBuilder = SSLContexts.custom();
|
||||
|
||||
if (log.isTraceEnabled()) {
|
||||
log.trace("Configure HTTP client with SSL");
|
||||
}
|
||||
|
||||
if (trustStore != null) {
|
||||
sslContextBuilder.loadTrustMaterial(trustStore, null);
|
||||
}
|
||||
|
||||
if (keystore != null) {
|
||||
sslContextBuilder.loadKeyMaterial(keystore, keyPassword, new PrivateKeyStrategy() {
|
||||
|
||||
@Override
|
||||
public String chooseAlias(Map<String, PrivateKeyDetails> aliases, Socket socket) {
|
||||
if(aliases == null || aliases.isEmpty()) {
|
||||
return keystoreAlias;
|
||||
}
|
||||
|
||||
if(keystoreAlias == null || keystoreAlias.isEmpty()) {
|
||||
return aliases.keySet().iterator().next();
|
||||
}
|
||||
|
||||
return keystoreAlias; }
|
||||
});
|
||||
}
|
||||
|
||||
final HostnameVerifier hnv = verifyHostnames?new DefaultHostnameVerifier():NoopHostnameVerifier.INSTANCE;
|
||||
|
||||
final SSLContext sslContext = sslContextBuilder.build();
|
||||
httpClientBuilder.setSSLStrategy(new SSLIOSessionStrategy(
|
||||
sslContext,
|
||||
supportedProtocols,
|
||||
supportedCipherSuites,
|
||||
hnv
|
||||
));
|
||||
}
|
||||
|
||||
if (basicCredentials != null) {
|
||||
httpClientBuilder.setDefaultHeaders(Lists.newArrayList(new BasicHeader(HttpHeaders.AUTHORIZATION, "Basic " + basicCredentials)));
|
||||
}
|
||||
|
||||
// TODO: set a timeout until we have a proper way to deal with back pressure
|
||||
int timeout = 5;
|
||||
|
||||
RequestConfig config = RequestConfig.custom()
|
||||
.setConnectTimeout(timeout * 1000)
|
||||
.setConnectionRequestTimeout(timeout * 1000)
|
||||
.setSocketTimeout(timeout * 1000).build();
|
||||
|
||||
httpClientBuilder.setDefaultRequestConfig(config);
|
||||
|
||||
return httpClientBuilder;
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
if (rclient != null) {
|
||||
rclient.close();
|
||||
}
|
||||
}
|
||||
}
|
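Editor's note: illustration only, not part of this commit. A minimal usage sketch for the builder above; host, credentials and index name are placeholders.

        // Build a client against a single node and index one document
        static boolean sendAuditEvent() throws Exception {
            try (HttpClient client = HttpClient.builder("localhost:9200")
                    .setBasicCredentials("admin", "admin")
                    .build()) {
                return client.index("{\"msg\":\"audit event\"}", "security-auditlog", "_doc", true);
            }
        }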
@ -0,0 +1,503 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt;
|
||||
|
||||
import io.jsonwebtoken.Jwts;
|
||||
import io.jsonwebtoken.SignatureAlgorithm;
|
||||
|
||||
import java.security.KeyPair;
|
||||
import java.security.KeyPairGenerator;
|
||||
import java.security.PrivateKey;
|
||||
import java.security.PublicKey;
|
||||
import java.security.SecureRandom;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
|
||||
import com.amazon.dlic.auth.http.jwt.HTTPJwtAuthenticator;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.AuthCredentials;
|
||||
import com.amazon.opendistroforelasticsearch.security.util.FakeRestRequest;
|
||||
import com.google.common.io.BaseEncoding;
|
||||
|
||||
public class HTTPJwtAuthenticatorTest {
|
||||
|
||||
final static byte[] secretKey = new byte[1024];
|
||||
|
||||
static {
|
||||
new SecureRandom().nextBytes(secretKey);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNoKey() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder().build();
|
||||
|
||||
String jwsToken = Jwts.builder().setSubject("Leonard McCoy").signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth =new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", "Bearer "+jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNull(creds);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testEmptyKey() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder().put("signing_key", "").build();
|
||||
|
||||
String jwsToken = Jwts.builder().setSubject("Leonard McCoy").signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", "Bearer "+jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNull(creds);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBadKey() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder().put("signing_key", BaseEncoding.base64().encode(new byte[]{1,3,3,4,3,6,7,8,3,10})).build();
|
||||
|
||||
String jwsToken = Jwts.builder().setSubject("Leonard McCoy").signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", "Bearer "+jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNull(creds);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTokenMissing() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder().put("signing_key", BaseEncoding.base64().encode(secretKey)).build();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNull(creds);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testInvalid() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder().put("signing_key", BaseEncoding.base64().encode(secretKey)).build();
|
||||
|
||||
String jwsToken = "123invalidtoken..";
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", "Bearer "+jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNull(creds);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBearer() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder().put("signing_key", BaseEncoding.base64().encode(secretKey)).build();
|
||||
|
||||
String jwsToken = Jwts.builder().setSubject("Leonard McCoy").setAudience("myaud").signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", "Bearer "+jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals("Leonard McCoy", creds.getUsername());
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
Assert.assertEquals(2, creds.getAttributes().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBearerWrongPosition() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder().put("signing_key", BaseEncoding.base64().encode(secretKey)).build();
|
||||
|
||||
String jwsToken = Jwts.builder().setSubject("Leonard McCoy").signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", jwsToken + "Bearer " + " 123");
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNull(creds);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNonBearer() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder().put("signing_key", BaseEncoding.base64().encode(secretKey)).build();
|
||||
|
||||
String jwsToken = Jwts.builder().setSubject("Leonard McCoy").signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals("Leonard McCoy", creds.getUsername());
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRoles() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder()
|
||||
.put("signing_key", BaseEncoding.base64().encode(secretKey))
|
||||
.put("roles_key", "roles")
|
||||
.build();
|
||||
|
||||
String jwsToken = Jwts.builder()
|
||||
.setSubject("Leonard McCoy")
|
||||
.claim("roles", "role1,role2")
|
||||
.signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals("Leonard McCoy", creds.getUsername());
|
||||
Assert.assertEquals(2, creds.getBackendRoles().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNullClaim() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder()
|
||||
.put("signing_key", BaseEncoding.base64().encode(secretKey))
|
||||
.put("roles_key", "roles")
|
||||
.build();
|
||||
|
||||
String jwsToken = Jwts.builder()
|
||||
.setSubject("Leonard McCoy")
|
||||
.claim("roles", null)
|
||||
.signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals("Leonard McCoy", creds.getUsername());
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNonStringClaim() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder()
|
||||
.put("signing_key", BaseEncoding.base64().encode(secretKey))
|
||||
.put("roles_key", "roles")
|
||||
.build();
|
||||
|
||||
String jwsToken = Jwts.builder()
|
||||
.setSubject("Leonard McCoy")
|
||||
.claim("roles", 123L)
|
||||
.signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals("Leonard McCoy", creds.getUsername());
|
||||
Assert.assertEquals(1, creds.getBackendRoles().size());
|
||||
Assert.assertTrue( creds.getBackendRoles().contains("123"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRolesMissing() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder()
|
||||
.put("signing_key", BaseEncoding.base64().encode(secretKey))
|
||||
.put("roles_key", "roles")
|
||||
.build();
|
||||
|
||||
String jwsToken = Jwts.builder()
|
||||
.setSubject("Leonard McCoy")
|
||||
.signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals("Leonard McCoy", creds.getUsername());
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWrongSubjectKey() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder()
|
||||
.put("signing_key", BaseEncoding.base64().encode(secretKey))
|
||||
.put("subject_key", "missing")
|
||||
.build();
|
||||
|
||||
String jwsToken = Jwts.builder()
|
||||
.claim("roles", "role1,role2")
|
||||
.claim("asub", "Dr. Who")
|
||||
.signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNull(creds);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAlternativeSubject() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder()
|
||||
.put("signing_key", BaseEncoding.base64().encode(secretKey))
|
||||
.put("subject_key", "asub")
|
||||
.build();
|
||||
|
||||
String jwsToken = Jwts.builder()
|
||||
.setSubject("Leonard McCoy")
|
||||
.claim("roles", "role1,role2")
|
||||
.claim("asub", "Dr. Who")
|
||||
.signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals("Dr. Who", creds.getUsername());
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNonStringAlternativeSubject() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder()
|
||||
.put("signing_key", BaseEncoding.base64().encode(secretKey))
|
||||
.put("subject_key", "asub")
|
||||
.build();
|
||||
|
||||
String jwsToken = Jwts.builder()
|
||||
.setSubject("Leonard McCoy")
|
||||
.claim("roles", "role1,role2")
|
||||
.claim("asub", false)
|
||||
.signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals("false", creds.getUsername());
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testUrlParam() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder()
|
||||
.put("signing_key", BaseEncoding.base64().encode(secretKey))
|
||||
.put("jwt_url_parameter", "abc")
|
||||
.build();
|
||||
|
||||
String jwsToken = Jwts.builder()
|
||||
.setSubject("Leonard McCoy")
|
||||
.signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
FakeRestRequest req = new FakeRestRequest(headers, new HashMap<String, String>());
|
||||
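// The token is passed via the configured URL parameter ("abc") instead of an Authorization header.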
req.params().put("abc", jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(req, null);
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals("Leonard McCoy", creds.getUsername());
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testExp() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder()
|
||||
.put("signing_key", BaseEncoding.base64().encode(secretKey))
|
||||
.build();
|
||||
|
||||
String jwsToken = Jwts.builder()
|
||||
.setSubject("Expired")
|
||||
.setExpiration(new Date(100))
|
||||
.signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNull(creds);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNbf() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder()
|
||||
.put("signing_key", BaseEncoding.base64().encode(secretKey))
|
||||
.build();
|
||||
|
||||
String jwsToken = Jwts.builder()
|
||||
.setSubject("Expired")
|
||||
.setNotBefore(new Date(System.currentTimeMillis()+(1000*36000)))
|
||||
.signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNull(creds);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRS256() throws Exception {
|
||||
|
||||
KeyPairGenerator keyGen = KeyPairGenerator.getInstance("RSA");
|
||||
keyGen.initialize(2048);
|
||||
KeyPair pair = keyGen.generateKeyPair();
|
||||
PrivateKey priv = pair.getPrivate();
|
||||
PublicKey pub = pair.getPublic();
|
||||
|
||||
String jwsToken = Jwts.builder().setSubject("Leonard McCoy").signWith(SignatureAlgorithm.RS256, priv).compact();
|
||||
Settings settings = Settings.builder().put("signing_key", "-----BEGIN PUBLIC KEY-----\n"+BaseEncoding.base64().encode(pub.getEncoded())+"-----END PUBLIC KEY-----").build();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", "Bearer "+jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals("Leonard McCoy", creds.getUsername());
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testES512() throws Exception {
|
||||
|
||||
KeyPairGenerator keyGen = KeyPairGenerator.getInstance("EC");
|
||||
keyGen.initialize(571);
|
||||
KeyPair pair = keyGen.generateKeyPair();
|
||||
PrivateKey priv = pair.getPrivate();
|
||||
PublicKey pub = pair.getPublic();
|
||||
|
||||
String jwsToken = Jwts.builder().setSubject("Leonard McCoy").signWith(SignatureAlgorithm.ES512, priv).compact();
|
||||
Settings settings = Settings.builder().put("signing_key", BaseEncoding.base64().encode(pub.getEncoded())).build();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", "Bearer "+jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals("Leonard McCoy", creds.getUsername());
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void rolesArray() throws Exception {
|
||||
|
||||
|
||||
|
||||
Settings settings = Settings.builder()
|
||||
.put("signing_key", BaseEncoding.base64().encode(secretKey))
|
||||
.put("roles_key", "roles")
|
||||
.build();
|
||||
|
||||
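// Use a raw JSON payload so the "roles" claim is a JSON array rather than a comma-separated string.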
String jwsToken = Jwts.builder()
|
||||
.setPayload("{"+
|
||||
"\"sub\": \"John Doe\","+
|
||||
"\"roles\": [\"a\",\"b\",\"3rd\"]"+
|
||||
"}")
|
||||
.signWith(SignatureAlgorithm.HS512, secretKey).compact();
|
||||
|
||||
HTTPJwtAuthenticator jwtAuth = new HTTPJwtAuthenticator(settings, null);
|
||||
Map<String, String> headers = new HashMap<String, String>();
|
||||
headers.put("Authorization", "Bearer "+jwsToken);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(headers, new HashMap<String, String>()), null);
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals("John Doe", creds.getUsername());
|
||||
Assert.assertEquals(3, creds.getBackendRoles().size());
|
||||
Assert.assertTrue(creds.getBackendRoles().contains("a"));
|
||||
Assert.assertTrue(creds.getBackendRoles().contains("b"));
|
||||
Assert.assertTrue(creds.getBackendRoles().contains("3rd"));
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,26 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
import org.apache.cxf.jaxrs.json.basic.JsonMapObject;
|
||||
import org.apache.cxf.jaxrs.json.basic.JsonMapObjectReaderWriter;
|
||||
|
||||
class CxfTestTools {
|
||||
|
||||
static String toJson(JsonMapObject jsonMapObject) {
|
||||
return new JsonMapObjectReaderWriter().toJson(jsonMapObject);
|
||||
}
|
||||
}
|
@ -0,0 +1,144 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
import java.util.HashMap;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Assert;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
import com.amazon.dlic.auth.http.jwt.keybyoidc.HTTPJwtKeyByOpenIdConnectAuthenticator;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.AuthCredentials;
|
||||
import com.amazon.opendistroforelasticsearch.security.util.FakeRestRequest;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
|
||||
public class HTTPJwtKeyByOpenIdConnectAuthenticatorTest {
|
||||
|
||||
protected static MockIpdServer mockIdpServer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
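// Start a local mock identity provider that serves an OIDC discovery document plus the full test JWKS.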
mockIdpServer = new MockIpdServer(TestJwk.Jwks.ALL);
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDown() {
|
||||
if (mockIdpServer != null) {
|
||||
try {
|
||||
mockIdpServer.close();
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void basicTest() {
|
||||
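// Point the authenticator at the mock IdP's discovery endpoint; signing keys are fetched from there.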
Settings settings = Settings.builder().put("openid_connect_url", mockIdpServer.getDiscoverUri()).build();
|
||||
|
||||
HTTPJwtKeyByOpenIdConnectAuthenticator jwtAuth = new HTTPJwtKeyByOpenIdConnectAuthenticator(settings, null);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(
|
||||
ImmutableMap.of("Authorization", TestJwts.MC_COY_SIGNED_OCT_1), new HashMap<String, String>()), null);
|
||||
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals(TestJwts.MCCOY_SUBJECT, creds.getUsername());
|
||||
Assert.assertEquals(TestJwts.TEST_AUDIENCE, creds.getAttributes().get("attr.jwt.aud"));
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
Assert.assertEquals(3, creds.getAttributes().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void bearerTest() {
|
||||
Settings settings = Settings.builder().put("openid_connect_url", mockIdpServer.getDiscoverUri()).build();
|
||||
|
||||
HTTPJwtKeyByOpenIdConnectAuthenticator jwtAuth = new HTTPJwtKeyByOpenIdConnectAuthenticator(settings, null);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(
|
||||
new FakeRestRequest(ImmutableMap.of("Authorization", "Bearer " + TestJwts.MC_COY_SIGNED_OCT_1),
|
||||
new HashMap<String, String>()),
|
||||
null);
|
||||
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals(TestJwts.MCCOY_SUBJECT, creds.getUsername());
|
||||
Assert.assertEquals(TestJwts.TEST_AUDIENCE, creds.getAttributes().get("attr.jwt.aud"));
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
Assert.assertEquals(3, creds.getAttributes().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRoles() throws Exception {
|
||||
Settings settings = Settings.builder().put("openid_connect_url", mockIdpServer.getDiscoverUri())
|
||||
.put("roles_key", TestJwts.ROLES_CLAIM).build();
|
||||
|
||||
HTTPJwtKeyByOpenIdConnectAuthenticator jwtAuth = new HTTPJwtKeyByOpenIdConnectAuthenticator(settings, null);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(
|
||||
ImmutableMap.of("Authorization", TestJwts.MC_COY_SIGNED_OCT_1), new HashMap<String, String>()), null);
|
||||
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals(TestJwts.MCCOY_SUBJECT, creds.getUsername());
|
||||
Assert.assertEquals(TestJwts.TEST_ROLES, creds.getBackendRoles());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testExp() throws Exception {
|
||||
Settings settings = Settings.builder().put("openid_connect_url", mockIdpServer.getDiscoverUri()).build();
|
||||
|
||||
HTTPJwtKeyByOpenIdConnectAuthenticator jwtAuth = new HTTPJwtKeyByOpenIdConnectAuthenticator(settings, null);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(
|
||||
new FakeRestRequest(ImmutableMap.of("Authorization", TestJwts.MC_COY_EXPIRED_SIGNED_OCT_1),
|
||||
new HashMap<String, String>()),
|
||||
null);
|
||||
|
||||
Assert.assertNull(creds);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testRS256() throws Exception {
|
||||
|
||||
Settings settings = Settings.builder().put("openid_connect_url", mockIdpServer.getDiscoverUri()).build();
|
||||
|
||||
HTTPJwtKeyByOpenIdConnectAuthenticator jwtAuth = new HTTPJwtKeyByOpenIdConnectAuthenticator(settings, null);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(
|
||||
ImmutableMap.of("Authorization", TestJwts.MC_COY_SIGNED_RSA_1), new HashMap<String, String>()), null);
|
||||
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals(TestJwts.MCCOY_SUBJECT, creds.getUsername());
|
||||
Assert.assertEquals(TestJwts.TEST_AUDIENCE, creds.getAttributes().get("attr.jwt.aud"));
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
Assert.assertEquals(3, creds.getAttributes().size());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBadSignature() throws Exception {
|
||||
|
||||
Settings settings = Settings.builder().put("openid_connect_url", mockIdpServer.getDiscoverUri()).build();
|
||||
|
||||
HTTPJwtKeyByOpenIdConnectAuthenticator jwtAuth = new HTTPJwtKeyByOpenIdConnectAuthenticator(settings, null);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(new FakeRestRequest(
|
||||
ImmutableMap.of("Authorization", TestJwts.MC_COY_SIGNED_RSA_X), new HashMap<String, String>()), null);
|
||||
|
||||
Assert.assertNull(creds);
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,140 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.Socket;
|
||||
import java.security.KeyStore;
|
||||
import java.security.cert.CertificateEncodingException;
|
||||
import java.security.cert.X509Certificate;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.http.HttpException;
|
||||
import org.apache.http.HttpRequest;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.protocol.HttpContext;
|
||||
import org.apache.http.protocol.HttpCoreContext;
|
||||
import org.apache.http.ssl.PrivateKeyDetails;
|
||||
import org.apache.http.ssl.PrivateKeyStrategy;
|
||||
import org.apache.http.ssl.SSLContextBuilder;
|
||||
import org.apache.http.ssl.SSLContexts;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Assert;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
import com.amazon.dlic.auth.http.jwt.keybyoidc.KeySetRetriever;
|
||||
import com.amazon.dlic.util.SettingsBasedSSLConfigurator;
|
||||
import com.amazon.opendistroforelasticsearch.security.test.helper.file.FileHelper;
|
||||
import com.amazon.opendistroforelasticsearch.security.test.helper.network.SocketUtils;
|
||||
import com.google.common.hash.Hashing;
|
||||
|
||||
public class KeySetRetrieverTest {
|
||||
protected static MockIpdServer mockIdpServer;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
mockIdpServer = new MockIpdServer(TestJwk.Jwks.ALL);
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDown() {
|
||||
if (mockIdpServer != null) {
|
||||
try {
|
||||
mockIdpServer.close();
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void cacheTest() {
|
||||
KeySetRetriever keySetRetriever = new KeySetRetriever(mockIdpServer.getDiscoverUri(), null, true);
|
||||
|
||||
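// First retrieval: the OIDC discovery document is not cached yet, so this counts as a cache miss.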
keySetRetriever.get();
|
||||
|
||||
Assert.assertEquals(1, keySetRetriever.getOidcCacheMisses());
|
||||
Assert.assertEquals(0, keySetRetriever.getOidcCacheHits());
|
||||
|
||||
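// Second retrieval: the cached discovery document is reused, producing a cache hit and no new miss.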
keySetRetriever.get();
|
||||
Assert.assertEquals(1, keySetRetriever.getOidcCacheMisses());
|
||||
Assert.assertEquals(1, keySetRetriever.getOidcCacheHits());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void clientCertTest() throws Exception {
|
||||
|
||||
try (MockIpdServer sslMockIdpServer = new MockIpdServer(TestJwk.Jwks.ALL, SocketUtils.findAvailableTcpPort(),
|
||||
true) {
|
||||
@Override
|
||||
protected void handleDiscoverRequest(HttpRequest request, HttpResponse response, HttpContext context)
|
||||
throws HttpException, IOException {
|
||||
|
||||
MockIpdServer.SSLTestHttpServerConnection connection = (MockIpdServer.SSLTestHttpServerConnection) ((HttpCoreContext) context)
|
||||
.getConnection();
|
||||
|
||||
X509Certificate peerCert = (X509Certificate) connection.getPeerCertificates()[0];
|
||||
|
||||
try {
|
||||
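// Verify on the server side that the client authenticated with the expected certificate by checking its SHA-256 fingerprint.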
String sha256Fingerprint = Hashing.sha256().hashBytes(peerCert.getEncoded()).toString();
|
||||
|
||||
Assert.assertEquals("04b2b8baea7a0a893f0223d95b72081e9a1e154a0f9b1b4e75998085972b1b68",
|
||||
sha256Fingerprint);
|
||||
|
||||
} catch (CertificateEncodingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
|
||||
super.handleDiscoverRequest(request, response, context);
|
||||
}
|
||||
}) {
|
||||
SSLContextBuilder sslContextBuilder = SSLContexts.custom();
|
||||
|
||||
KeyStore trustStore = KeyStore.getInstance("JKS");
|
||||
InputStream trustStream = new FileInputStream(
|
||||
FileHelper.getAbsoluteFilePathFromClassPath("jwt/truststore.jks").toFile());
|
||||
trustStore.load(trustStream, "changeit".toCharArray());
|
||||
|
||||
KeyStore keyStore = KeyStore.getInstance("JKS");
|
||||
InputStream keyStream = new FileInputStream(
|
||||
FileHelper.getAbsoluteFilePathFromClassPath("jwt/spock-keystore.jks").toFile());
|
||||
|
||||
keyStore.load(keyStream, "changeit".toCharArray());
|
||||
|
||||
sslContextBuilder.loadTrustMaterial(trustStore, null);
|
||||
|
||||
sslContextBuilder.loadKeyMaterial(keyStore, "changeit".toCharArray(), new PrivateKeyStrategy() {
|
||||
|
||||
@Override
|
||||
public String chooseAlias(Map<String, PrivateKeyDetails> aliases, Socket socket) {
|
||||
return "spock";
|
||||
}
|
||||
});
|
||||
|
||||
SettingsBasedSSLConfigurator.SSLConfig sslConfig = new SettingsBasedSSLConfigurator.SSLConfig(
|
||||
sslContextBuilder.build(), new String[] { "TLSv1.2", "TLSv1.1" }, null, null, false, false, false,
|
||||
trustStore, null, keyStore, null, null);
|
||||
|
||||
KeySetRetriever keySetRetriever = new KeySetRetriever(sslMockIdpServer.getDiscoverUri(), sslConfig, false);
|
||||
|
||||
keySetRetriever.get();
|
||||
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,210 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
import static com.amazon.dlic.auth.http.jwt.keybyoidc.CxfTestTools.toJson;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.Socket;
|
||||
import java.nio.charset.CharsetDecoder;
|
||||
import java.nio.charset.CharsetEncoder;
|
||||
import java.security.GeneralSecurityException;
|
||||
import java.security.KeyStore;
|
||||
import java.security.cert.Certificate;
|
||||
|
||||
import javax.net.ssl.KeyManagerFactory;
|
||||
import javax.net.ssl.SSLContext;
|
||||
import javax.net.ssl.SSLException;
|
||||
import javax.net.ssl.SSLPeerUnverifiedException;
|
||||
import javax.net.ssl.SSLServerSocket;
|
||||
import javax.net.ssl.SSLSocket;
|
||||
import javax.net.ssl.TrustManagerFactory;
|
||||
|
||||
import org.apache.cxf.rs.security.jose.jwk.JsonWebKeys;
|
||||
import org.apache.http.HttpConnectionFactory;
|
||||
import org.apache.http.HttpException;
|
||||
import org.apache.http.HttpRequest;
|
||||
import org.apache.http.HttpResponse;
|
||||
import org.apache.http.config.ConnectionConfig;
|
||||
import org.apache.http.config.MessageConstraints;
|
||||
import org.apache.http.entity.ContentLengthStrategy;
|
||||
import org.apache.http.entity.StringEntity;
|
||||
import org.apache.http.impl.ConnSupport;
|
||||
import org.apache.http.impl.DefaultBHttpServerConnection;
|
||||
import org.apache.http.impl.bootstrap.HttpServer;
|
||||
import org.apache.http.impl.bootstrap.SSLServerSetupHandler;
|
||||
import org.apache.http.impl.bootstrap.ServerBootstrap;
|
||||
import org.apache.http.io.HttpMessageParserFactory;
|
||||
import org.apache.http.io.HttpMessageWriterFactory;
|
||||
import org.apache.http.protocol.HttpContext;
|
||||
import org.apache.http.protocol.HttpRequestHandler;
|
||||
|
||||
import com.amazon.opendistroforelasticsearch.security.test.helper.file.FileHelper;
|
||||
import com.amazon.opendistroforelasticsearch.security.test.helper.network.SocketUtils;
|
||||
|
||||
class MockIpdServer implements Closeable {
|
||||
final static String CTX_DISCOVER = "/discover";
|
||||
final static String CTX_KEYS = "/api/oauth/keys";
|
||||
|
||||
private final HttpServer httpServer;
|
||||
private final int port;
|
||||
private final String uri;
|
||||
private final boolean ssl;
|
||||
private final JsonWebKeys jwks;
|
||||
|
||||
MockIpdServer(JsonWebKeys jwks) throws IOException {
|
||||
this(jwks, SocketUtils.findAvailableTcpPort(), false);
|
||||
}
|
||||
|
||||
MockIpdServer(JsonWebKeys jwks, int port, boolean ssl) throws IOException {
|
||||
this.port = port;
|
||||
this.uri = (ssl ? "https" : "http") + "://localhost:" + port;
|
||||
this.ssl = ssl;
|
||||
this.jwks = jwks;
|
||||
|
||||
ServerBootstrap serverBootstrap = ServerBootstrap.bootstrap().setListenerPort(port)
|
||||
.registerHandler(CTX_DISCOVER, new HttpRequestHandler() {
|
||||
|
||||
@Override
|
||||
public void handle(HttpRequest request, HttpResponse response, HttpContext context)
|
||||
throws HttpException, IOException {
|
||||
|
||||
handleDiscoverRequest(request, response, context);
|
||||
|
||||
}
|
||||
}).registerHandler(CTX_KEYS, new HttpRequestHandler() {
|
||||
|
||||
@Override
|
||||
public void handle(HttpRequest request, HttpResponse response, HttpContext context)
|
||||
throws HttpException, IOException {
|
||||
|
||||
handleKeysRequest(request, response, context);
|
||||
|
||||
}
|
||||
});
|
||||
|
||||
if (ssl) {
|
||||
serverBootstrap = serverBootstrap.setSslContext(createSSLContext())
|
||||
.setSslSetupHandler(new SSLServerSetupHandler() {
|
||||
|
||||
@Override
|
||||
public void initialize(SSLServerSocket socket) throws SSLException {
|
||||
socket.setNeedClientAuth(true);
|
||||
}
|
||||
}).setConnectionFactory(new HttpConnectionFactory<DefaultBHttpServerConnection>() {
|
||||
|
||||
private ConnectionConfig cconfig = ConnectionConfig.DEFAULT;
|
||||
|
||||
@Override
|
||||
public DefaultBHttpServerConnection createConnection(final Socket socket) throws IOException {
|
||||
final SSLTestHttpServerConnection conn = new SSLTestHttpServerConnection(
|
||||
this.cconfig.getBufferSize(), this.cconfig.getFragmentSizeHint(),
|
||||
ConnSupport.createDecoder(this.cconfig), ConnSupport.createEncoder(this.cconfig),
|
||||
this.cconfig.getMessageConstraints(), null, null, null, null);
|
||||
conn.bind(socket);
|
||||
return conn;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
this.httpServer = serverBootstrap.create();
|
||||
|
||||
httpServer.start();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
httpServer.stop();
|
||||
}
|
||||
|
||||
public HttpServer getHttpServer() {
|
||||
return httpServer;
|
||||
}
|
||||
|
||||
public String getUri() {
|
||||
return uri;
|
||||
}
|
||||
|
||||
public String getDiscoverUri() {
|
||||
return uri + CTX_DISCOVER;
|
||||
}
|
||||
|
||||
public int getPort() {
|
||||
return port;
|
||||
}
|
||||
|
||||
protected void handleDiscoverRequest(HttpRequest request, HttpResponse response, HttpContext context)
|
||||
throws HttpException, IOException {
|
||||
response.setStatusCode(200);
|
||||
response.setHeader("Cache-Control", "public, max-age=31536000");
|
||||
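// Serve a minimal discovery document: the JWKS endpoint, the issuer, and one unknown property that clients must ignore.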
response.setEntity(new StringEntity("{\"jwks_uri\": \"" + uri + CTX_KEYS + "\",\n" + "\"issuer\": \"" + uri
|
||||
+ "\", \"unknownPropertyToBeIgnored\": 42}"));
|
||||
}
|
||||
|
||||
protected void handleKeysRequest(HttpRequest request, HttpResponse response, HttpContext context)
|
||||
throws HttpException, IOException {
|
||||
response.setStatusCode(200);
|
||||
response.setEntity(new StringEntity(toJson(jwks)));
|
||||
}
|
||||
|
||||
private SSLContext createSSLContext() {
|
||||
if (!this.ssl) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
final TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
|
||||
final KeyStore trustStore = KeyStore.getInstance("JKS");
|
||||
InputStream trustStream = new FileInputStream(
|
||||
FileHelper.getAbsoluteFilePathFromClassPath("jwt/truststore.jks").toFile());
|
||||
trustStore.load(trustStream, "changeit".toCharArray());
|
||||
tmf.init(trustStore);
|
||||
|
||||
final KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
|
||||
final KeyStore keyStore = KeyStore.getInstance("JKS");
|
||||
InputStream keyStream = new FileInputStream(
|
||||
FileHelper.getAbsoluteFilePathFromClassPath("jwt/node-0-keystore.jks").toFile());
|
||||
|
||||
keyStore.load(keyStream, "changeit".toCharArray());
|
||||
kmf.init(keyStore, "changeit".toCharArray());
|
||||
|
||||
SSLContext sslContext = SSLContext.getInstance("TLSv1.2");
|
||||
sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
|
||||
return sslContext;
|
||||
} catch (GeneralSecurityException | IOException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
static class SSLTestHttpServerConnection extends DefaultBHttpServerConnection {
|
||||
public SSLTestHttpServerConnection(final int buffersize, final int fragmentSizeHint,
|
||||
final CharsetDecoder chardecoder, final CharsetEncoder charencoder,
|
||||
final MessageConstraints constraints, final ContentLengthStrategy incomingContentStrategy,
|
||||
final ContentLengthStrategy outgoingContentStrategy,
|
||||
final HttpMessageParserFactory<HttpRequest> requestParserFactory,
|
||||
final HttpMessageWriterFactory<HttpResponse> responseWriterFactory) {
|
||||
super(buffersize, fragmentSizeHint, chardecoder, charencoder, constraints, incomingContentStrategy,
|
||||
outgoingContentStrategy, requestParserFactory, responseWriterFactory);
|
||||
}
|
||||
|
||||
public Certificate[] getPeerCertificates() throws SSLPeerUnverifiedException {
|
||||
return ((SSLSocket) getSocket()).getSession().getPeerCertificates();
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,130 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.Future;
|
||||
|
||||
import org.apache.cxf.rs.security.jose.jwk.JsonWebKey;
|
||||
import org.apache.cxf.rs.security.jose.jwk.JsonWebKeys;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
|
||||
import com.amazon.dlic.auth.http.jwt.keybyoidc.AuthenticatorUnavailableException;
|
||||
import com.amazon.dlic.auth.http.jwt.keybyoidc.BadCredentialsException;
|
||||
import com.amazon.dlic.auth.http.jwt.keybyoidc.KeySetProvider;
|
||||
import com.amazon.dlic.auth.http.jwt.keybyoidc.SelfRefreshingKeySet;
|
||||
|
||||
public class SelfRefreshingKeySetTest {
|
||||
|
||||
@Test
|
||||
public void basicTest() throws AuthenticatorUnavailableException, BadCredentialsException {
|
||||
SelfRefreshingKeySet selfRefreshingKeySet = new SelfRefreshingKeySet(new MockKeySetProvider());
|
||||
|
||||
JsonWebKey key1 = selfRefreshingKeySet.getKey("kid_a");
|
||||
Assert.assertEquals(TestJwk.OCT_1_K, key1.getProperty("k"));
|
||||
Assert.assertEquals(1, selfRefreshingKeySet.getRefreshCount());
|
||||
|
||||
JsonWebKey key2 = selfRefreshingKeySet.getKey("kid_b");
|
||||
Assert.assertEquals(TestJwk.OCT_2_K, key2.getProperty("k"));
|
||||
Assert.assertEquals(1, selfRefreshingKeySet.getRefreshCount());
|
||||
|
||||
try {
|
||||
selfRefreshingKeySet.getKey("kid_X");
|
||||
Assert.fail("Expected a BadCredentialsException");
|
||||
} catch (BadCredentialsException e) {
|
||||
Assert.assertEquals(2, selfRefreshingKeySet.getRefreshCount());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Test(timeout = 10000)
|
||||
public void twoThreadedTest() throws Exception {
|
||||
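// The first getKey() blocks inside the provider; a second getKey() for another kid is queued
// rather than triggering a second refresh. After unblock(), both futures resolve from the single refreshed key set.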
BlockingMockKeySetProvider provider = new BlockingMockKeySetProvider();
|
||||
|
||||
final SelfRefreshingKeySet selfRefreshingKeySet = new SelfRefreshingKeySet(provider);
|
||||
|
||||
ExecutorService executorService = Executors.newCachedThreadPool();
|
||||
|
||||
Future<JsonWebKey> f1 = executorService.submit(() -> selfRefreshingKeySet.getKey("kid_a"));
|
||||
|
||||
provider.waitForCalled();
|
||||
|
||||
Future<JsonWebKey> f2 = executorService.submit(() -> selfRefreshingKeySet.getKey("kid_b"));
|
||||
|
||||
while (selfRefreshingKeySet.getQueuedGetCount() == 0) {
|
||||
Thread.sleep(10);
|
||||
}
|
||||
|
||||
provider.unblock();
|
||||
|
||||
Assert.assertEquals(TestJwk.OCT_1_K, f1.get().getProperty("k"));
|
||||
Assert.assertEquals(TestJwk.OCT_2_K, f2.get().getProperty("k"));
|
||||
|
||||
Assert.assertEquals(1, selfRefreshingKeySet.getRefreshCount());
|
||||
Assert.assertEquals(1, selfRefreshingKeySet.getQueuedGetCount());
|
||||
|
||||
}
|
||||
|
||||
static class MockKeySetProvider implements KeySetProvider {
|
||||
|
||||
@Override
|
||||
public JsonWebKeys get() throws AuthenticatorUnavailableException {
|
||||
return TestJwk.OCT_1_2_3;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class BlockingMockKeySetProvider extends MockKeySetProvider {
|
||||
private boolean blocked = true;
|
||||
private boolean called = false;
|
||||
|
||||
@Override
|
||||
public synchronized JsonWebKeys get() throws AuthenticatorUnavailableException {
|
||||
|
||||
called = true;
|
||||
notifyAll();
|
||||
|
||||
waitForUnblock();
|
||||
|
||||
return super.get();
|
||||
}
|
||||
|
||||
public synchronized void unblock() {
|
||||
blocked = false;
|
||||
notifyAll();
|
||||
}
|
||||
|
||||
public synchronized void waitForCalled() throws InterruptedException {
|
||||
while (!called) {
|
||||
wait();
|
||||
}
|
||||
}
|
||||
|
||||
private synchronized void waitForUnblock() {
|
||||
while (blocked) {
|
||||
try {
|
||||
wait();
|
||||
} catch (InterruptedException e) {
|
||||
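// Ignore the interrupt and keep waiting; the blocked test thread is released via unblock().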
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,161 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
import java.util.HashMap;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Test;
|
||||
|
||||
import com.amazon.dlic.auth.http.jwt.keybyoidc.HTTPJwtKeyByOpenIdConnectAuthenticator;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.AuthCredentials;
|
||||
import com.amazon.opendistroforelasticsearch.security.util.FakeRestRequest;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
|
||||
public class SingleKeyHTTPJwtKeyByOpenIdConnectAuthenticatorTest {
|
||||
|
||||
@Test
|
||||
public void basicTest() throws Exception {
|
||||
MockIpdServer mockIdpServer = new MockIpdServer(TestJwk.Jwks.RSA_1);
|
||||
try {
|
||||
Settings settings = Settings.builder().put("openid_connect_url", mockIdpServer.getDiscoverUri()).build();
|
||||
|
||||
HTTPJwtKeyByOpenIdConnectAuthenticator jwtAuth = new HTTPJwtKeyByOpenIdConnectAuthenticator(settings, null);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(
|
||||
new FakeRestRequest(ImmutableMap.of("Authorization", TestJwts.MC_COY_SIGNED_RSA_1),
|
||||
new HashMap<String, String>()),
|
||||
null);
|
||||
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals(TestJwts.MCCOY_SUBJECT, creds.getUsername());
|
||||
Assert.assertEquals(TestJwts.TEST_AUDIENCE, creds.getAttributes().get("attr.jwt.aud"));
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
Assert.assertEquals(3, creds.getAttributes().size());
|
||||
|
||||
} finally {
|
||||
try {
|
||||
mockIdpServer.close();
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void wrongSigTest() throws Exception {
|
||||
MockIpdServer mockIdpServer = new MockIpdServer(TestJwk.Jwks.RSA_1);
|
||||
try {
|
||||
Settings settings = Settings.builder().put("openid_connect_url", mockIdpServer.getDiscoverUri()).build();
|
||||
|
||||
HTTPJwtKeyByOpenIdConnectAuthenticator jwtAuth = new HTTPJwtKeyByOpenIdConnectAuthenticator(settings, null);
|
||||
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(
|
||||
new FakeRestRequest(ImmutableMap.of("Authorization", TestJwts.NoKid.MC_COY_SIGNED_RSA_X),
|
||||
new HashMap<String, String>()),
|
||||
null);
|
||||
|
||||
Assert.assertNull(creds);
|
||||
|
||||
} finally {
|
||||
try {
|
||||
mockIdpServer.close();
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void keyExchangeTest() throws Exception {
|
||||
MockIpdServer mockIdpServer = new MockIpdServer(TestJwk.Jwks.RSA_1);
|
||||
|
||||
Settings settings = Settings.builder().put("openid_connect_url", mockIdpServer.getDiscoverUri()).build();
|
||||
|
||||
HTTPJwtKeyByOpenIdConnectAuthenticator jwtAuth = new HTTPJwtKeyByOpenIdConnectAuthenticator(settings, null);
|
||||
|
||||
try {
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(
|
||||
new FakeRestRequest(ImmutableMap.of("Authorization", TestJwts.NoKid.MC_COY_SIGNED_RSA_1),
|
||||
new HashMap<String, String>()),
|
||||
null);
|
||||
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals(TestJwts.MCCOY_SUBJECT, creds.getUsername());
|
||||
Assert.assertEquals(TestJwts.TEST_AUDIENCE, creds.getAttributes().get("attr.jwt.aud"));
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
Assert.assertEquals(3, creds.getAttributes().size());
|
||||
|
||||
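// A token signed with a key the IdP does not publish (RSA_2) must be rejected.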
creds = jwtAuth.extractCredentials(
|
||||
new FakeRestRequest(ImmutableMap.of("Authorization", TestJwts.NoKid.MC_COY_SIGNED_RSA_2),
|
||||
new HashMap<String, String>()),
|
||||
null);
|
||||
|
||||
Assert.assertNull(creds);
|
||||
|
||||
creds = jwtAuth.extractCredentials(
|
||||
new FakeRestRequest(ImmutableMap.of("Authorization", TestJwts.NoKid.MC_COY_SIGNED_RSA_X),
|
||||
new HashMap<String, String>()),
|
||||
null);
|
||||
|
||||
Assert.assertNull(creds);
|
||||
|
||||
creds = jwtAuth.extractCredentials(
|
||||
new FakeRestRequest(ImmutableMap.of("Authorization", TestJwts.NoKid.MC_COY_SIGNED_RSA_1),
|
||||
new HashMap<String, String>()),
|
||||
null);
|
||||
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals(TestJwts.MCCOY_SUBJECT, creds.getUsername());
|
||||
Assert.assertEquals(TestJwts.TEST_AUDIENCE, creds.getAttributes().get("attr.jwt.aud"));
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
Assert.assertEquals(3, creds.getAttributes().size());
|
||||
|
||||
} finally {
|
||||
try {
|
||||
mockIdpServer.close();
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
mockIdpServer = new MockIpdServer(TestJwk.Jwks.RSA_2);
|
||||
settings = Settings.builder().put("openid_connect_url", mockIdpServer.getDiscoverUri()).build(); // port changed: the new mock IdP listens on a different port, so settings and the authenticator are rebuilt
|
||||
jwtAuth = new HTTPJwtKeyByOpenIdConnectAuthenticator(settings, null);
|
||||
|
||||
try {
|
||||
AuthCredentials creds = jwtAuth.extractCredentials(
|
||||
new FakeRestRequest(ImmutableMap.of("Authorization", TestJwts.NoKid.MC_COY_SIGNED_RSA_2),
|
||||
new HashMap<String, String>()),
|
||||
null);
|
||||
|
||||
Assert.assertNotNull(creds);
|
||||
Assert.assertEquals(TestJwts.MCCOY_SUBJECT, creds.getUsername());
|
||||
Assert.assertEquals(TestJwts.TEST_AUDIENCE, creds.getAttributes().get("attr.jwt.aud"));
|
||||
Assert.assertEquals(0, creds.getBackendRoles().size());
|
||||
Assert.assertEquals(3, creds.getAttributes().size());
|
||||
|
||||
} finally {
|
||||
try {
|
||||
mockIdpServer.close();
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,111 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.apache.cxf.rs.security.jose.jwk.JsonWebKey;
|
||||
import org.apache.cxf.rs.security.jose.jwk.JsonWebKeys;
|
||||
import org.apache.cxf.rs.security.jose.jwk.KeyType;
|
||||
import org.apache.cxf.rs.security.jose.jwk.PublicKeyUse;
|
||||
|
||||
class TestJwk {
|
||||
|
||||
// Keys generated with https://mkjwk.org/
|
||||
|
||||
static final String OCT_1_K = "eTDZjSqRD9Abhod9iqeGX_7o93a-eElTeXWAF6FmzQshmRIrPD-C9ET3pFjJ_IBrzmWIZDk8ig-X_PIyGmKsxNMsrU-0BNWF5gJq5xOp4rYTl8z66Tw9wr8tHLxLxgJqkLSuUCRBZvlZlQ7jNdhBBxgM-hdSSzsN1T33qdIwhrUeJ-KXI5yKUXHjoWFYb9tETbYQ4NvONowkCsXK_flp-E3F_OcKe_z5iVUszAV8QfCod1zhbya540kDejXCL6N_XMmhWJqum7UJ3hgf6DEtroPSnVpHt4iR5w9ArKK-IBgluPght03gNcoNqwz7p77TFbdOmUKF_PWy1bcdbaUoSg";
|
||||
static final String OCT_2_K = "YP6Q3IF2qJEagV948dsicXKpG43Ci2W7ZxUpiVTBLZr1vFN9ZGUKxeXGgVWuMFYTmoHvv5AOC8BvoNOpcE3rcJNuNOqTMdujxD92CxjOykiLEKQ0Te_7xQ4LnSQjlqdIJ4U3S7qCnJLd1LxhKOGZcUhE_pjhwf7q2RUUpvC3UOyZZLog9yeflnp9nqqDy5yVqRYWZRcPI06kJTh3Z8IFi2JRJV14iUFQtOHQKuyJRMcsldKnfWl7YW3JdQ9IRN-c1lEYSEBmsavEejcqHZkbli2svqLfmCBJVWffXDRxhq0_VafiL83HC0bP9qeNKivhemw6foVmg8UMs7yJ6ao02A";
|
||||
static final String OCT_3_K = "r3aeW3OK7-B4Hs3hq9BmlT1D3jRiolH9PL82XUz9xAS7dniAdmvMnN5GkOc1vqibOe2T-CC_103UglDm9D0iU9S9zn6wTuQt1L5wfZIoHd9f5IjJ_YFEzZMvsoUY_-ji_0K_ugVvBPwi9JnBQHHS4zrgmP06dGjmcnZDcIf4W_iFas3lDYSXilL1V2QhNaynpSqTarpfBGSphKv4Zg2JhsX8xB0VSaTlEq4lF8pzvpWSxXCW9CtomhB80daSuTizrmSTEPpdN3XzQ2-Tovo1ieMOfDU4csvjEk7Bwc2ThjpnA8ucKQUYpUv9joBxKuCdUltssthWnetrogjYOn_xGA";
|
||||
|
||||
static final JsonWebKey OCT_1 = createOct("kid_a", "HS256", OCT_1_K);
|
||||
static final JsonWebKey OCT_2 = createOct("kid_b", "HS256", OCT_2_K);
|
||||
static final JsonWebKey OCT_3 = createOct("kid_c", "HS256", OCT_3_K);
|
||||
|
||||
static final JsonWebKeys OCT_1_2_3 = createJwks(OCT_1, OCT_2, OCT_3);
|
||||
|
||||
static final String RSA_1_D = "On8XGMmdM5Fm5hvuhQk-qAkIP2CoK5QMx0OH5m_WDzKXZv8lZ2eg89I4ehBiOKGdw1h_mjmWwTah-evpXV-BF5QpejPQqxkXS-8s5r2AvietQq32jl-gwIwZWTvfzjpT9On0YJZ4q01tMDj3r-YOLUW2xrz3za9tl6pPU_5kP63C-hoj1ybTwcC7ujbCPwhY6yAopMA1v10uVmCxsjsNikEjB6YePgHixez51wO3Z8mXNwefWukFWYJ5T7t4kHMSf5P_8FJZ14u5yvYZnngE_tJCyHFdIDb6UWsrgxomtlQU-SdZYK_NY6gw6mCkjjlqOoYqlsrRJ16kJ81Ds269oQ";
|
||||
static final String RSA_1_N = "hMSoV74FRtoaU7xpp0llsXbHE4oUseKoSNga-C_YIXuoGc3pajHh1WtJppZQNYM1Xy07nHchLJAdgqL2_q_Lk8cFHmmL1KTjwPflK9zZ9C0-8QTOrrqU9vkp3gT00jWWJ0HJbUvXIGxPGPnxoJoI--ToE0EWsYEWqWyx1TqYol--oUUPlY5r7vXRKIn5UZNz6VGkW8nI4fXaqDUpXH9uVM9A-nJX2B0Xjwu3VOn2zrgkCZeGTHjNgfLISOTFe9m8lHWLKcuxOWPuCZyCN0C6ZdWB1YP2NhxYFQwQfGV8yfnTImgL-DuV4WPSRVj7W_GJr213-oXBrBR0CnQEPbi_3w";
|
||||
static final String RSA_1_E = "AQAB";
|
||||
|
||||
static final String RSA_2_D = "QQ18k_buZHOSVYzkXL1FaqdodZVNZ_hrBtDcmCVUYjm3dfDVQYt70h8LUdLUCSUA2-_VEwqVdQ-L2FTg7NZVvZJXIyQXp3yrdY1vGKebs3oaIB_VQT8jt-64s12r_8V2ksK2myRrvfm2Fgqi32H5QkspuaQYb9s4NJwKSk7mVAz5dRWQdCx9JNVWknWDJxgHzh3Uku1tNwUOyvSYcRnSZ9X7oWNHaHkSGLEYE_mxD7YXs6HEdCDwc3WuvR5AiVKg2OGec0lL1hY_AWX5UxnR00mhAa0qPytFfaPe-Sc5tQ5regQRqRNDyDESVGIvqXsY8ePjZPOFyoxrcJ2wN3bt4Q";
|
||||
static final String RSA_2_N = "lt4EID7tbrE9E8l7VfVGhiwSx4O8nLO5AZo5pJNE1fUy4bM56wH_DeU3YspXh0UvH-vcn4uKjhwJdOCjzalBc2wXD0aRd3JXzWwbjveo6oBFz6kU7VnY8nFMYLMlb6FDcl066OZOtW4PIFtAStXj5rX_J94He3sfTClodpNljTi4qeQwoNsrnZ5Eq82pCp20zCgvbdes8HQBq_QgApvzhL3c-PXd2I_4pBnaPoZwAnufthk7-v8V0Zf5CrDuqEczKKr38pvwggnxZqsfUy2X0bXPBvDXh5B2ljWxWl8tHJbKXzOhfV5Nx5rllJnNabFoVxh3hnlxdOZ88zcaslWBLQ";
|
||||
static final String RSA_2_E = "AQAB";
|
||||
|
||||
static final String RSA_X_D = "iXym57VmwbWvcHtf--xSDPTagEJdnceuErjH6lbuabFXeBx42ZpuAICvo6_YpMcqLybD37ArIu2SD5J_ZBALp4v4KecMPFI5lZr7GKlGgqnForvcC7EWA_ZtZ9uY746cKun8NtemcOlAenn2dvc9NP2S4JtE3FHxmqs2MMmz-ki-ar8-zu0j0HLPLl_Wj2SZ9yCeFmmH3eocX5IRRiWwPnudQJM2t0kt9V-M88YzobqzoMEoFjTfi-owa-w6xGAgJxAUKk02vTiTivH3Qmkk-uAXyj-VtcyzYXD74ICN8EplcAEUKegDR59T4-u18GdpDbPU20XzxDaO4lZiQ7TIEQ";
|
||||
static final String RSA_X_N = "jDDVUMXOXDVcaRVAT5TtuiAsLxk7XAAwyyECfmySZul7D5XVLMtGe6rP2900q3nM4BaCEiuwXjmTCZDAGlFGs2a3eQ1vbBSv9_0KGHL-gZGFPNiv0v8aR7QzZ-abhGnRy5F52PlTWsypGgG_kQpF2t2TBotvYhvVPagAt4ljllDKvY1siOvS3nh4TqcUtWcbgQZEWPmaXuhx0eLmhQJca7UEw99YlGNew48AEzt7ZnfU0Qkz3JwSz7IcPx-NfIh6BN6LwAg_ASdoM3MR8rDOtLYavmJVhutrfOpE-4-fw1mf3eLYu7xrxIplSiOIsHunTUssnTiBkXAaGqGJs604Pw";
|
||||
static final String RSA_X_E = "AQAB";
|
||||
|
||||
static final JsonWebKey RSA_1 = createRsa("kid_1", "RS256", RSA_1_E, RSA_1_N, RSA_1_D);
|
||||
static final JsonWebKey RSA_1_PUBLIC = createRsaPublic("kid_1", "RS256", RSA_1_E, RSA_1_N);
|
||||
|
||||
static final JsonWebKey RSA_2 = createRsa("kid_2", "RS256", RSA_2_E, RSA_2_N, RSA_2_D);
|
||||
static final JsonWebKey RSA_2_PUBLIC = createRsaPublic("kid_2", "RS256", RSA_2_E, RSA_2_N);
|
||||
|
||||
static final JsonWebKey RSA_X = createRsa("kid_2", "RS256", RSA_X_E, RSA_X_N, RSA_X_D);
|
||||
static final JsonWebKey RSA_X_PUBLIC = createRsaPublic("kid_2", "RS256", RSA_X_E, RSA_X_N);
|
||||
|
||||
static final JsonWebKeys RSA_1_2_PUBLIC = createJwks(RSA_1_PUBLIC, RSA_2_PUBLIC);
|
||||
|
||||
static class Jwks {
|
||||
static final JsonWebKeys ALL = createJwks(OCT_1, OCT_2, OCT_3, RSA_1_PUBLIC, RSA_2_PUBLIC);
|
||||
static final JsonWebKeys RSA_1 = createJwks(RSA_1_PUBLIC);
|
||||
static final JsonWebKeys RSA_2 = createJwks(RSA_2_PUBLIC);
|
||||
}
|
||||
|
||||
|
||||
private static JsonWebKey createOct(String keyId, String algorithm, String k) {
|
||||
JsonWebKey result = new JsonWebKey();
|
||||
|
||||
result.setKeyId(keyId);
|
||||
result.setKeyType(KeyType.OCTET);
|
||||
result.setAlgorithm(algorithm);
|
||||
result.setPublicKeyUse(PublicKeyUse.SIGN);
|
||||
result.setProperty("k", k);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private static JsonWebKey createRsa(String keyId, String algorithm, String e, String n, String d) {
|
||||
JsonWebKey result = new JsonWebKey();
|
||||
|
||||
result.setKeyId(keyId);
|
||||
result.setKeyType(KeyType.RSA);
|
||||
result.setAlgorithm(algorithm);
|
||||
result.setPublicKeyUse(PublicKeyUse.SIGN);
|
||||
|
||||
if (d != null) {
|
||||
result.setProperty("d", d);
|
||||
}
|
||||
|
||||
result.setProperty("e", e);
|
||||
result.setProperty("n", n);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private static JsonWebKey createRsaPublic(String keyId, String algorithm, String e, String n) {
|
||||
return createRsa(keyId, algorithm, e, n, null);
|
||||
}
|
||||
|
||||
private static JsonWebKeys createJwks(JsonWebKey... array) {
|
||||
JsonWebKeys result = new JsonWebKeys();
|
||||
|
||||
result.setKeys(Arrays.asList(array));
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,91 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.jwt.keybyoidc;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.cxf.rs.security.jose.jwk.JsonWebKey;
|
||||
import org.apache.cxf.rs.security.jose.jws.JwsHeaders;
|
||||
import org.apache.cxf.rs.security.jose.jws.JwsUtils;
|
||||
import org.apache.cxf.rs.security.jose.jwt.JoseJwtProducer;
|
||||
import org.apache.cxf.rs.security.jose.jwt.JwtClaims;
|
||||
import org.apache.cxf.rs.security.jose.jwt.JwtConstants;
|
||||
import org.apache.cxf.rs.security.jose.jwt.JwtToken;
|
||||
import org.apache.logging.log4j.util.Strings;
|
||||
|
||||
import com.google.common.collect.ImmutableSet;
|
||||
|
||||
class TestJwts {
|
||||
static final String ROLES_CLAIM = "roles";
|
||||
static final Set<String> TEST_ROLES = ImmutableSet.of("role1", "role2");
|
||||
static final String TEST_ROLES_STRING = Strings.join(TEST_ROLES, ',');
|
||||
|
||||
static final String TEST_AUDIENCE = "TestAudience";
|
||||
|
||||
static final String MCCOY_SUBJECT = "Leonard McCoy";
|
||||
|
||||
static final JwtToken MC_COY = create(MCCOY_SUBJECT, TEST_AUDIENCE, ROLES_CLAIM, TEST_ROLES_STRING);
|
||||
|
||||
static final JwtToken MC_COY_EXPIRED = create(MCCOY_SUBJECT, TEST_AUDIENCE, ROLES_CLAIM, TEST_ROLES_STRING,
|
||||
JwtConstants.CLAIM_EXPIRY, 10);
|
||||
|
||||
static final String MC_COY_SIGNED_OCT_1 = createSigned(MC_COY, TestJwk.OCT_1);
|
||||
|
||||
static final String MC_COY_SIGNED_RSA_1 = createSigned(MC_COY, TestJwk.RSA_1);
|
||||
|
||||
static final String MC_COY_SIGNED_RSA_X = createSigned(MC_COY, TestJwk.RSA_X);
|
||||
|
||||
static final String MC_COY_EXPIRED_SIGNED_OCT_1 = createSigned(MC_COY_EXPIRED, TestJwk.OCT_1);
|
||||
|
||||
static class NoKid {
|
||||
static final String MC_COY_SIGNED_RSA_1 = createSignedWithoutKeyId(MC_COY, TestJwk.RSA_1);
|
||||
static final String MC_COY_SIGNED_RSA_2 = createSignedWithoutKeyId(MC_COY, TestJwk.RSA_2);
|
||||
static final String MC_COY_SIGNED_RSA_X = createSignedWithoutKeyId(MC_COY, TestJwk.RSA_X);
|
||||
}
|
||||
|
||||
static JwtToken create(String subject, String audience, Object... moreClaims) {
|
||||
JwtClaims claims = new JwtClaims();
|
||||
|
||||
claims.setSubject(subject);
|
||||
claims.setAudience(audience);
|
||||
|
||||
if (moreClaims != null) {
|
||||
for (int i = 0; i < moreClaims.length; i += 2) {
|
||||
claims.setClaim(String.valueOf(moreClaims[i]), moreClaims[i + 1]);
|
||||
}
|
||||
}
|
||||
|
||||
JwtToken result = new JwtToken(claims);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
static String createSigned(JwtToken baseJwt, JsonWebKey jwk) {
|
||||
JwsHeaders jwsHeaders = new JwsHeaders();
|
||||
JwtToken signedToken = new JwtToken(jwsHeaders, baseJwt.getClaims());
|
||||
|
||||
jwsHeaders.setKeyId(jwk.getKeyId());
|
||||
|
||||
return new JoseJwtProducer().processJwt(signedToken, null, JwsUtils.getSignatureProvider(jwk));
|
||||
}
|
||||
|
||||
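// Same as createSigned(), but omits the "kid" header so the verifier cannot select the key by its id.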
static String createSignedWithoutKeyId(JwtToken baseJwt, JsonWebKey jwk) {
|
||||
JwsHeaders jwsHeaders = new JwsHeaders();
|
||||
JwtToken signedToken = new JwtToken(jwsHeaders, baseJwt.getClaims());
|
||||
|
||||
return new JoseJwtProducer().processJwt(signedToken, null, JwsUtils.getSignatureProvider(jwk));
|
||||
}
|
||||
}
|
@ -0,0 +1,569 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.http.saml;
|
||||
|
||||
import java.io.FileInputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.security.KeyStore;
|
||||
import java.security.KeyStoreException;
|
||||
import java.security.NoSuchAlgorithmException;
|
||||
import java.security.PrivateKey;
|
||||
import java.security.UnrecoverableKeyException;
|
||||
import java.security.cert.CertificateException;
|
||||
import java.security.cert.X509Certificate;
|
||||
import java.util.Arrays;
|
||||
import java.util.Base64;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import javax.net.ssl.KeyManagerFactory;
|
||||
|
||||
import org.apache.cxf.rs.security.jose.jws.JwsJwtCompactConsumer;
|
||||
import org.apache.cxf.rs.security.jose.jwt.JwtToken;
|
||||
import org.elasticsearch.common.bytes.BytesArray;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.rest.RestChannel;
|
||||
import org.elasticsearch.rest.RestRequest;
|
||||
import org.elasticsearch.rest.RestRequest.Method;
|
||||
import org.elasticsearch.rest.RestResponse;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Assert;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
import org.opensaml.saml.saml2.core.NameIDType;
|
||||
|
||||
import com.amazon.dlic.auth.http.saml.HTTPSamlAuthenticator;
|
||||
import com.amazon.opendistroforelasticsearch.security.DefaultObjectMapper;
|
||||
import com.amazon.opendistroforelasticsearch.security.test.helper.file.FileHelper;
|
||||
import com.amazon.opendistroforelasticsearch.security.user.AuthCredentials;
|
||||
import com.amazon.opendistroforelasticsearch.security.util.FakeRestRequest;
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.google.common.collect.ImmutableMap;
|
||||
|
||||
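// Exercises HTTPSamlAuthenticator against MockSamlIdpServer: SP-initiated and IdP-initiated SSO,
// signature validation, roles extraction and logout URL generation.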
public class HTTPSamlAuthenticatorTest {
|
||||
protected static MockSamlIdpServer mockSamlIdpServer;
|
||||
private static final Pattern WWW_AUTHENTICATE_PATTERN = Pattern
|
||||
.compile("([^\\s]+)\\s*([^\\s=]+)=\"([^\"]+)\"\\s*([^\\s=]+)=\"([^\"]+)\"\\s*([^\\s=]+)=\"([^\"]+)\"\\s*");
|
||||
|
||||
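// Password-protected (PKCS#8) SP signing key, password "changeit"; used by basicLogoutTestEncryptedKey.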
private static final String SPOCK_KEY = "-----BEGIN ENCRYPTED PRIVATE KEY-----\n"
|
||||
+ "MIIE6TAbBgkqhkiG9w0BBQMwDgQI0JMa7PyPedwCAggABIIEyLdPL2RXj8jjKqFT\n"
|
||||
+ "p+7vywwyxyUQOQvvIIU6H+lKZPd/y6pxzYtGd1suT2aermrrlh4b/ZXXfj/EcKcw\n"
|
||||
+ "GgcXB60Kr7UHIv7Xr498S4EKa9R7UG0NtWtsA3FVR5ndwXI+CiRSShhkskmpseVH\n"
|
||||
+ "dNWAoUsKQFbZRLnoINMKIw1/lpQBUwAUcYVB7LxLeKSTVHn/h9kvq0tad1kbE5OY\n"
|
||||
+ "GnOLEVW311++XQ3Ep/13tGEZCrxef+QsnmXuYxXBq4RvbyGZOvyM2FC7va8KzJxl\n"
|
||||
+ "P38SPEL1TzqokQB/eLDBMBOCqkhTbP/8lNuoEVm44T6//ijBp6VdBB+YRIFh3NrS\n"
|
||||
+ "1fPuDVgHr1jrRGICe8lzWy/bSa+4FlxYjn5qpEzZQtbC6C+iRzlwtlCiDdKl8zJ1\n"
|
||||
+ "YF80OW9Gr3Kvph2LJukBiODcyWUAsAf5vJH3vfPV4T9kWTNMu2NCy3Ch8u9d906k\n"
|
||||
+ "zojB/tRRdZ/XCftkU05gYU/5ruU1YA49U60s0KWXvSLmecFo2SjkcEoPDI+Y80Uw\n"
|
||||
+ "OB/5kdh1M1uu/qjoJTPWBbZ28L6e0fiMsr7eWSG7PQFwnN6VzY6Oesm8AS8LMe3V\n"
|
||||
+ "Dr4Syec8vVfGg/EDsjNC1yeZTzlO66NQYGkpnHwK1kgX/XXe7fjDfztPyM9crBXj\n"
|
||||
+ "YcYpNULAkMj9QUVDQqQ7L8TjoAFQiSdvNa+kkDhaxnAXoxfqeacTtkpKcHADsAQL\n"
|
||||
+ "azfoyflnpuZ1dIn0noRFsVuguKDp4k990bhXu9RkQ1H5IzIoYqJwypacVdt3m74o\n"
|
||||
+ "jpZvBY6z0EtBNkze6WA0Vj0BSWpy/IzndDwroG4Xf+54hn0R/Tp5K5UNttOaJN8c\n"
|
||||
+ "9U/NTiGJTJg1O4x6xbPD7C5bBdoJ/MH5yJuk/dUc7pVkisLpuH9sAPETjYCdFIjX\n"
|
||||
+ "MSRJCtq2ouT0ZRW1yBIrKIadgHLExhjZjTSQCBXJMbO7r2DjPHMZU23GTiPtC8ua\n"
|
||||
+ "L2BmC+AW7RQ2Fyo3hJDT2TM4XlMMlTtGuFxkWwmjV+FiwfjbiR3cp0+99/X6OFu5\n"
|
||||
+ "ysgZLuTMQsmWNJ8ZARZqBnkGnN92Aw4D5GLCFv3QXO+fqJnOP1PbkPwpjq59Yytf\n"
|
||||
+ "U4XqyTwRYSXRzwPFFb7RcgL9HbmjpRBEnvqEjKYeXxkBnhs+WOWN/PuJzGgP5uAk\n"
|
||||
+ "jAjQbtgLEPd4WpGcwEhkX6S1DBi8NrGapuehCjXsN1axify8Kx4eRuTiPdINlgsq\n"
|
||||
+ "d2MsPIuDgU2+0QXrXjRLwABcMGuKcmmfZjC+zZomj+yr4+Togs3vhSj9yGK3HHMh\n"
|
||||
+ "NgOlPBTibruXXa4AI07c28j3sEry+CMZrUGyYg6o1HLBpBfOmp7V5HJcvkMFWCVy\n"
|
||||
+ "DPFm5LZu0jZMDj9a+oGkv4hfp1xSXSUjhjiGz47xFJb6PH9pOUIkhTEdFCgEXbaR\n"
|
||||
+ "fXcR+kakLOotL4X1cT9cpxdimN3CCTBpr03gCv2NCVYMYhHKHK+CQVngJrY+PzMH\n"
|
||||
+ "q6fw81bUNcixZyeXFfLFN6GK75k51UV7YS/X2H8YkqGeIVNaFjrcqUoVAN8jQOeb\n"
|
||||
+ "XXIa8gT/MdNT0+W3NHKcbE31pDhOI92COZWlhOyp1cLhyo1ytayjxPTl/2RM/Vtj\n" + "T9IKkp7810LOKhrCDQ==\n"
|
||||
+ "-----END ENCRYPTED PRIVATE KEY-----";
|
||||
|
||||
private static X509Certificate spSigningCertificate;
|
||||
private static PrivateKey spSigningPrivateKey;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
mockSamlIdpServer = new MockSamlIdpServer();
|
||||
initSpSigningKeys();
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDown() {
|
||||
if (mockSamlIdpServer != null) {
|
||||
try {
|
||||
mockSamlIdpServer.close();
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
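// Happy path for SP-initiated SSO: obtain the WWW-Authenticate challenge, let the mock IdP answer the
// redirect with a signed response, exchange it at the ACS endpoint and check the "sub" claim of the issued JWT.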
@Test
|
||||
public void basicTest() throws Exception {
|
||||
mockSamlIdpServer.setSignResponses(true);
|
||||
mockSamlIdpServer.loadSigningKeys("saml/kirk-keystore.jks", "kirk");
|
||||
mockSamlIdpServer.setAuthenticateUser("horst");
|
||||
mockSamlIdpServer.setEndpointQueryString(null);
|
||||
|
||||
Settings settings = Settings.builder().put("idp.metadata_url", mockSamlIdpServer.getMetadataUri())
|
||||
.put("kibana_url", "http://wherever").put("idp.entity_id", mockSamlIdpServer.getIdpEntityId())
|
||||
.put("exchange_key", "abc").put("roles_key", "roles").put("path.home", ".").build();
|
||||
|
||||
HTTPSamlAuthenticator samlAuthenticator = new HTTPSamlAuthenticator(settings, null);
|
||||
|
||||
AuthenticateHeaders authenticateHeaders = getAutenticateHeaders(samlAuthenticator);
|
||||
|
||||
String encodedSamlResponse = mockSamlIdpServer.handleSsoGetRequestURI(authenticateHeaders.location);
|
||||
|
||||
RestRequest tokenRestRequest = buildTokenExchangeRestRequest(encodedSamlResponse, authenticateHeaders);
|
||||
TestRestChannel tokenRestChannel = new TestRestChannel(tokenRestRequest);
|
||||
|
||||
samlAuthenticator.reRequestAuthentication(tokenRestChannel, null);
|
||||
|
||||
String responseJson = new String(BytesReference.toBytes(tokenRestChannel.response.content()));
|
||||
HashMap<String, Object> response = DefaultObjectMapper.objectMapper.readValue(responseJson,
|
||||
new TypeReference<HashMap<String, Object>>() {
|
||||
});
|
||||
String authorization = (String) response.get("authorization");
|
||||
|
||||
Assert.assertNotNull("Expected authorization attribute in JSON: " + responseJson, authorization);
|
||||
|
||||
JwsJwtCompactConsumer jwtConsumer = new JwsJwtCompactConsumer(authorization.replaceAll("\\s*bearer\\s*", ""));
|
||||
JwtToken jwt = jwtConsumer.getJwtToken();
|
||||
|
||||
Assert.assertEquals("horst", jwt.getClaim("sub"));
|
||||
}
|
||||
|
||||
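// IdP-initiated ("unsolicited") SSO: the response is posted to the idpinitiated ACS endpoint without a RequestId.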
@Test
|
||||
public void unsolicitedSsoTest() throws Exception {
|
||||
mockSamlIdpServer.setSignResponses(true);
|
||||
mockSamlIdpServer.loadSigningKeys("saml/kirk-keystore.jks", "kirk");
|
||||
mockSamlIdpServer.setAuthenticateUser("horst");
|
||||
mockSamlIdpServer.setEndpointQueryString(null);
|
||||
mockSamlIdpServer.setDefaultAssertionConsumerService("http://wherever/opendistrosecurity/saml/acs/idpinitiated");
|
||||
|
||||
Settings settings = Settings.builder().put("idp.metadata_url", mockSamlIdpServer.getMetadataUri())
|
||||
.put("kibana_url", "http://wherever").put("idp.entity_id", mockSamlIdpServer.getIdpEntityId())
|
||||
.put("exchange_key", "abc").put("roles_key", "roles").put("path.home", ".").build();
|
||||
|
||||
HTTPSamlAuthenticator samlAuthenticator = new HTTPSamlAuthenticator(settings, null);
|
||||
|
||||
String encodedSamlResponse = mockSamlIdpServer.createUnsolicitedSamlResponse();
|
||||
|
||||
RestRequest tokenRestRequest = buildTokenExchangeRestRequest(encodedSamlResponse, null,
|
||||
"/opendistrosecurity/saml/acs/idpinitiated");
|
||||
TestRestChannel tokenRestChannel = new TestRestChannel(tokenRestRequest);
|
||||
|
||||
samlAuthenticator.reRequestAuthentication(tokenRestChannel, null);
|
||||
|
||||
String responseJson = new String(BytesReference.toBytes(tokenRestChannel.response.content()));
|
||||
HashMap<String, Object> response = DefaultObjectMapper.objectMapper.readValue(responseJson,
|
||||
new TypeReference<HashMap<String, Object>>() {
|
||||
});
|
||||
String authorization = (String) response.get("authorization");
|
||||
|
||||
Assert.assertNotNull("Expected authorization attribute in JSON: " + responseJson, authorization);
|
||||
|
||||
JwsJwtCompactConsumer jwtConsumer = new JwsJwtCompactConsumer(authorization.replaceAll("\\s*bearer\\s*", ""));
|
||||
JwtToken jwt = jwtConsumer.getJwtToken();
|
||||
|
||||
Assert.assertEquals("horst", jwt.getClaim("sub"));
|
||||
}
|
||||
|
||||
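// An unsolicited response submitted together with a request id that was never issued must be rejected.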
@Test
|
||||
public void badUnsolicitedSsoTest() throws Exception {
|
||||
mockSamlIdpServer.setSignResponses(true);
|
||||
mockSamlIdpServer.loadSigningKeys("saml/kirk-keystore.jks", "kirk");
|
||||
mockSamlIdpServer.setAuthenticateUser("horst");
|
||||
mockSamlIdpServer.setEndpointQueryString(null);
|
||||
mockSamlIdpServer.setDefaultAssertionConsumerService("http://wherever/opendistrosecurity/saml/acs/idpinitiated");
|
||||
|
||||
Settings settings = Settings.builder().put("idp.metadata_url", mockSamlIdpServer.getMetadataUri())
|
||||
.put("kibana_url", "http://wherever").put("idp.entity_id", mockSamlIdpServer.getIdpEntityId())
|
||||
.put("exchange_key", "abc").put("roles_key", "roles").put("path.home", ".").build();
|
||||
|
||||
HTTPSamlAuthenticator samlAuthenticator = new HTTPSamlAuthenticator(settings, null);
|
||||
|
||||
String encodedSamlResponse = mockSamlIdpServer.createUnsolicitedSamlResponse();
|
||||
|
||||
AuthenticateHeaders authenticateHeaders = new AuthenticateHeaders("http://wherever/opendistrosecurity/saml/acs/",
|
||||
"wrong_request_id");
|
||||
|
||||
RestRequest tokenRestRequest = buildTokenExchangeRestRequest(encodedSamlResponse, authenticateHeaders,
|
||||
"/opendistrosecurity/saml/acs/idpinitiated");
|
||||
TestRestChannel tokenRestChannel = new TestRestChannel(tokenRestRequest);
|
||||
|
||||
samlAuthenticator.reRequestAuthentication(tokenRestChannel, null);
|
||||
|
||||
Assert.assertEquals(RestStatus.UNAUTHORIZED, tokenRestChannel.response.status());
|
||||
}
|
||||
|
||||
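// The IdP switches to a different signing key after its metadata has been read, so the response signature must no longer validate.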
@Test
|
||||
public void wrongCertTest() throws Exception {
|
||||
mockSamlIdpServer.setSignResponses(true);
|
||||
mockSamlIdpServer.loadSigningKeys("saml/kirk-keystore.jks", "kirk");
|
||||
mockSamlIdpServer.setAuthenticateUser("horst");
|
||||
mockSamlIdpServer.setEndpointQueryString(null);
|
||||
|
||||
Settings settings = Settings.builder().put("idp.metadata_url", mockSamlIdpServer.getMetadataUri())
|
||||
.put("kibana_url", "http://wherever").put("idp.entity_id", mockSamlIdpServer.getIdpEntityId())
|
||||
.put("exchange_key", "abc").put("roles_key", "roles").put("path.home", ".").build();
|
||||
|
||||
HTTPSamlAuthenticator samlAuthenticator = new HTTPSamlAuthenticator(settings, null);
|
||||
|
||||
AuthenticateHeaders authenticateHeaders = getAutenticateHeaders(samlAuthenticator);
|
||||
|
||||
mockSamlIdpServer.loadSigningKeys("saml/spock-keystore.jks", "spock");
|
||||
|
||||
String encodedSamlResponse = mockSamlIdpServer.handleSsoGetRequestURI(authenticateHeaders.location);
|
||||
|
||||
RestRequest tokenRestRequest = buildTokenExchangeRestRequest(encodedSamlResponse, authenticateHeaders);
|
||||
TestRestChannel tokenRestChannel = new TestRestChannel(tokenRestRequest);
|
||||
|
||||
samlAuthenticator.reRequestAuthentication(tokenRestChannel, null);
|
||||
|
||||
Assert.assertEquals(401, tokenRestChannel.response.status().getStatus());
|
||||
}
|
||||
|
||||
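// Responses without a signature must be rejected.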
@Test
|
||||
public void noSignatureTest() throws Exception {
|
||||
mockSamlIdpServer.setSignResponses(false);
|
||||
mockSamlIdpServer.setAuthenticateUser("horst");
|
||||
mockSamlIdpServer.setEndpointQueryString(null);
|
||||
|
||||
Settings settings = Settings.builder().put("idp.metadata_url", mockSamlIdpServer.getMetadataUri())
|
||||
.put("kibana_url", "http://wherever").put("idp.entity_id", mockSamlIdpServer.getIdpEntityId())
|
||||
.put("exchange_key", "abc").put("roles_key", "roles").put("path.home", ".").build();
|
||||
|
||||
HTTPSamlAuthenticator samlAuthenticator = new HTTPSamlAuthenticator(settings, null);
|
||||
|
||||
AuthenticateHeaders authenticateHeaders = getAutenticateHeaders(samlAuthenticator);
|
||||
|
||||
String encodedSamlResponse = mockSamlIdpServer.handleSsoGetRequestURI(authenticateHeaders.location);
|
||||
|
||||
RestRequest tokenRestRequest = buildTokenExchangeRestRequest(encodedSamlResponse, authenticateHeaders);
|
||||
TestRestChannel tokenRestChannel = new TestRestChannel(tokenRestRequest);
|
||||
|
||||
samlAuthenticator.reRequestAuthentication(tokenRestChannel, null);
|
||||
|
||||
Assert.assertEquals(401, tokenRestChannel.response.status().getStatus());
|
||||
}
|
||||
|
||||
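// Roles delivered in the SAML assertion must show up as the "roles" claim of the exchanged JWT.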
@SuppressWarnings("unchecked")
|
||||
@Test
|
||||
public void rolesTest() throws Exception {
|
||||
mockSamlIdpServer.setSignResponses(true);
|
||||
mockSamlIdpServer.loadSigningKeys("saml/kirk-keystore.jks", "kirk");
|
||||
mockSamlIdpServer.setAuthenticateUser("horst");
|
||||
mockSamlIdpServer.setAuthenticateUserRoles(Arrays.asList("a", "b"));
|
||||
mockSamlIdpServer.setEndpointQueryString(null);
|
||||
|
||||
Settings settings = Settings.builder().put("idp.metadata_url", mockSamlIdpServer.getMetadataUri())
|
||||
.put("kibana_url", "http://wherever").put("idp.entity_id", mockSamlIdpServer.getIdpEntityId())
|
||||
.put("exchange_key", "abc").put("roles_key", "roles").put("path.home", ".").build();
|
||||
|
||||
HTTPSamlAuthenticator samlAuthenticator = new HTTPSamlAuthenticator(settings, null);
|
||||
|
||||
AuthenticateHeaders authenticateHeaders = getAutenticateHeaders(samlAuthenticator);
|
||||
|
||||
String encodedSamlResponse = mockSamlIdpServer.handleSsoGetRequestURI(authenticateHeaders.location);
|
||||
|
||||
RestRequest tokenRestRequest = buildTokenExchangeRestRequest(encodedSamlResponse, authenticateHeaders);
|
||||
TestRestChannel tokenRestChannel = new TestRestChannel(tokenRestRequest);
|
||||
|
||||
samlAuthenticator.reRequestAuthentication(tokenRestChannel, null);
|
||||
|
||||
String responseJson = new String(BytesReference.toBytes(tokenRestChannel.response.content()));
|
||||
HashMap<String, Object> response = DefaultObjectMapper.objectMapper.readValue(responseJson,
|
||||
new TypeReference<HashMap<String, Object>>() {
|
||||
});
|
||||
String authorization = (String) response.get("authorization");
|
||||
|
||||
Assert.assertNotNull("Expected authorization attribute in JSON: " + responseJson, authorization);
|
||||
|
||||
JwsJwtCompactConsumer jwtConsumer = new JwsJwtCompactConsumer(authorization.replaceAll("\\s*bearer\\s*", ""));
|
||||
JwtToken jwt = jwtConsumer.getJwtToken();
|
||||
|
||||
Assert.assertEquals("horst", jwt.getClaim("sub"));
|
||||
Assert.assertArrayEquals(new String[] { "a", "b" },
|
||||
((List<String>) jwt.getClaim("roles")).toArray(new String[0]));
|
||||
}
|
||||
|
||||
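// The IdP SSO endpoint already carries a query string ("extra=query"); the full SSO flow must still work.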
@Test
|
||||
public void idpEndpointWithQueryStringTest() throws Exception {
|
||||
mockSamlIdpServer.setSignResponses(true);
|
||||
mockSamlIdpServer.loadSigningKeys("saml/kirk-keystore.jks", "kirk");
|
||||
mockSamlIdpServer.setAuthenticateUser("horst");
|
||||
mockSamlIdpServer.setEndpointQueryString("extra=query");
|
||||
|
||||
Settings settings = Settings.builder().put("idp.metadata_url", mockSamlIdpServer.getMetadataUri())
|
||||
.put("kibana_url", "http://wherever").put("idp.entity_id", mockSamlIdpServer.getIdpEntityId())
|
||||
.put("exchange_key", "abc").put("roles_key", "roles").put("path.home", ".").build();
|
||||
|
||||
HTTPSamlAuthenticator samlAuthenticator = new HTTPSamlAuthenticator(settings, null);
|
||||
|
||||
AuthenticateHeaders authenticateHeaders = getAutenticateHeaders(samlAuthenticator);
|
||||
|
||||
String encodedSamlResponse = mockSamlIdpServer.handleSsoGetRequestURI(authenticateHeaders.location);
|
||||
|
||||
RestRequest tokenRestRequest = buildTokenExchangeRestRequest(encodedSamlResponse, authenticateHeaders);
|
||||
TestRestChannel tokenRestChannel = new TestRestChannel(tokenRestRequest);
|
||||
|
||||
samlAuthenticator.reRequestAuthentication(tokenRestChannel, null);
|
||||
|
||||
String responseJson = new String(BytesReference.toBytes(tokenRestChannel.response.content()));
|
||||
HashMap<String, Object> response = DefaultObjectMapper.objectMapper.readValue(responseJson,
|
||||
new TypeReference<HashMap<String, Object>>() {
|
||||
});
|
||||
String authorization = (String) response.get("authorization");
|
||||
|
||||
Assert.assertNotNull("Expected authorization attribute in JSON: " + responseJson, authorization);
|
||||
|
||||
JwsJwtCompactConsumer jwtConsumer = new JwsJwtCompactConsumer(authorization.replaceAll("\\s*bearer\\s*", ""));
|
||||
JwtToken jwt = jwtConsumer.getJwtToken();
|
||||
|
||||
Assert.assertEquals("horst", jwt.getClaim("sub"));
|
||||
}
|
||||
|
||||
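// With roles_seperator set to ",", the single attribute value "a,b" must be split into the roles "a" and "b".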
@SuppressWarnings("unchecked")
|
||||
@Test
|
||||
public void commaSeparatedRolesTest() throws Exception {
|
||||
mockSamlIdpServer.setAuthenticateUser("horst");
|
||||
mockSamlIdpServer.setSignResponses(true);
|
||||
mockSamlIdpServer.loadSigningKeys("saml/kirk-keystore.jks", "kirk");
|
||||
mockSamlIdpServer.setAuthenticateUserRoles(Arrays.asList("a,b"));
|
||||
mockSamlIdpServer.setEndpointQueryString(null);
|
||||
|
||||
Settings settings = Settings.builder().put("idp.metadata_url", mockSamlIdpServer.getMetadataUri())
|
||||
.put("kibana_url", "http://wherever").put("idp.entity_id", mockSamlIdpServer.getIdpEntityId())
|
||||
.put("exchange_key", "abc").put("roles_key", "roles").put("roles_seperator", ",").put("path.home", ".")
|
||||
.build();
|
||||
|
||||
HTTPSamlAuthenticator samlAuthenticator = new HTTPSamlAuthenticator(settings, null);
|
||||
|
||||
AuthenticateHeaders authenticateHeaders = getAutenticateHeaders(samlAuthenticator);
|
||||
|
||||
String encodedSamlResponse = mockSamlIdpServer.handleSsoGetRequestURI(authenticateHeaders.location);
|
||||
|
||||
RestRequest tokenRestRequest = buildTokenExchangeRestRequest(encodedSamlResponse, authenticateHeaders);
|
||||
TestRestChannel tokenRestChannel = new TestRestChannel(tokenRestRequest);
|
||||
|
||||
samlAuthenticator.reRequestAuthentication(tokenRestChannel, null);
|
||||
|
||||
String responseJson = new String(BytesReference.toBytes(tokenRestChannel.response.content()));
|
||||
HashMap<String, Object> response = DefaultObjectMapper.objectMapper.readValue(responseJson,
|
||||
new TypeReference<HashMap<String, Object>>() {
|
||||
});
|
||||
String authorization = (String) response.get("authorization");
|
||||
|
||||
Assert.assertNotNull("Expected authorization attribute in JSON: " + responseJson, authorization);
|
||||
|
||||
JwsJwtCompactConsumer jwtConsumer = new JwsJwtCompactConsumer(authorization.replaceAll("\\s*bearer\\s*", ""));
|
||||
JwtToken jwt = jwtConsumer.getJwtToken();
|
||||
|
||||
Assert.assertEquals("horst", jwt.getClaim("sub"));
|
||||
Assert.assertArrayEquals(new String[] { "a", "b" },
|
||||
((List<String>) jwt.getClaim("roles")).toArray(new String[0]));
|
||||
}
|
||||
|
||||
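// Builds a signed logout URL from the attr.jwt.* credential attributes and lets the mock IdP validate the SLO request.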
@Test
|
||||
public void basicLogoutTest() throws Exception {
|
||||
mockSamlIdpServer.setSignResponses(true);
|
||||
mockSamlIdpServer.loadSigningKeys("saml/kirk-keystore.jks", "kirk");
|
||||
mockSamlIdpServer.setAuthenticateUser("horst");
|
||||
mockSamlIdpServer.setSpSignatureCertificate(spSigningCertificate);
|
||||
mockSamlIdpServer.setEndpointQueryString(null);
|
||||
|
||||
Settings settings = Settings.builder().put("idp.metadata_url", mockSamlIdpServer.getMetadataUri())
|
||||
.put("kibana_url", "http://wherever").put("idp.entity_id", mockSamlIdpServer.getIdpEntityId())
|
||||
.put("exchange_key", "abc").put("roles_key", "roles")
|
||||
.put("sp.signature_private_key", "-BEGIN PRIVATE KEY-\n"
|
||||
+ Base64.getEncoder().encodeToString(spSigningPrivateKey.getEncoded()) + "-END PRIVATE KEY-")
|
||||
.put("path.home", ".").build();
|
||||
|
||||
HTTPSamlAuthenticator samlAuthenticator = new HTTPSamlAuthenticator(settings, null);
|
||||
|
||||
AuthCredentials authCredentials = new AuthCredentials("horst");
|
||||
authCredentials.addAttribute("attr.jwt.sub", "horst");
|
||||
authCredentials.addAttribute("attr.jwt.saml_nif", NameIDType.UNSPECIFIED);
|
||||
authCredentials.addAttribute("attr.jwt.saml_si", "si123");
|
||||
|
||||
String logoutUrl = samlAuthenticator.buildLogoutUrl(authCredentials);
|
||||
|
||||
mockSamlIdpServer.handleSloGetRequestURI(logoutUrl);
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void basicLogoutTestEncryptedKey() throws Exception {
|
||||
mockSamlIdpServer.setSignResponses(true);
|
||||
mockSamlIdpServer.loadSigningKeys("saml/kirk-keystore.jks", "kirk");
|
||||
mockSamlIdpServer.setAuthenticateUser("horst");
|
||||
mockSamlIdpServer.setSpSignatureCertificate(spSigningCertificate);
|
||||
mockSamlIdpServer.setEndpointQueryString(null);
|
||||
|
||||
Settings settings = Settings.builder().put("idp.metadata_url", mockSamlIdpServer.getMetadataUri())
|
||||
.put("kibana_url", "http://wherever").put("idp.entity_id", mockSamlIdpServer.getIdpEntityId())
|
||||
.put("exchange_key", "abc").put("roles_key", "roles").put("sp.signature_private_key", SPOCK_KEY)
|
||||
.put("sp.signature_private_key_password", "changeit").put("path.home", ".").build();
|
||||
|
||||
HTTPSamlAuthenticator samlAuthenticator = new HTTPSamlAuthenticator(settings, null);
|
||||
|
||||
AuthCredentials authCredentials = new AuthCredentials("horst");
|
||||
authCredentials.addAttribute("attr.jwt.sub", "horst");
|
||||
authCredentials.addAttribute("attr.jwt.saml_nif", NameIDType.UNSPECIFIED);
|
||||
authCredentials.addAttribute("attr.jwt.saml_si", "si123");
|
||||
|
||||
String logoutUrl = samlAuthenticator.buildLogoutUrl(authCredentials);
|
||||
|
||||
mockSamlIdpServer.handleSloGetRequestURI(logoutUrl);
|
||||
|
||||
}
|
||||
|
||||
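// Triggers an authentication challenge and extracts the "location" and "requestId" values
// from the X-Security-IdP WWW-Authenticate header.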
private AuthenticateHeaders getAutenticateHeaders(HTTPSamlAuthenticator samlAuthenticator) {
|
||||
RestRequest restRequest = new FakeRestRequest(ImmutableMap.of(), new HashMap<String, String>());
|
||||
TestRestChannel restChannel = new TestRestChannel(restRequest);
|
||||
|
||||
samlAuthenticator.reRequestAuthentication(restChannel, null);
|
||||
|
||||
List<String> wwwAuthenticateHeaders = restChannel.response.getHeaders().get("WWW-Authenticate");
|
||||
|
||||
Assert.assertNotNull(wwwAuthenticateHeaders);
|
||||
Assert.assertEquals("More than one WWW-Authenticate header: " + wwwAuthenticateHeaders, 1,
|
||||
wwwAuthenticateHeaders.size());
|
||||
|
||||
String wwwAuthenticateHeader = wwwAuthenticateHeaders.get(0);
|
||||
|
||||
Matcher wwwAuthenticateHeaderMatcher = WWW_AUTHENTICATE_PATTERN.matcher(wwwAuthenticateHeader);
|
||||
|
||||
if (!wwwAuthenticateHeaderMatcher.matches()) {
|
||||
Assert.fail("Invalid WWW-Authenticate header: " + wwwAuthenticateHeader);
|
||||
}
|
||||
|
||||
Assert.assertEquals("X-Security-IdP", wwwAuthenticateHeaderMatcher.group(1));
|
||||
Assert.assertEquals("location", wwwAuthenticateHeaderMatcher.group(4));
|
||||
Assert.assertEquals("requestId", wwwAuthenticateHeaderMatcher.group(6));
|
||||
|
||||
String location = wwwAuthenticateHeaderMatcher.group(5);
|
||||
String requestId = wwwAuthenticateHeaderMatcher.group(7);
|
||||
|
||||
return new AuthenticateHeaders(location, requestId);
|
||||
}
|
||||
|
||||
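// Builds the POST to /_opendistro/_security/api/authtoken carrying the SAMLResponse plus either the
// RequestId (SP-initiated) or the acsEndpoint (IdP-initiated).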
private RestRequest buildTokenExchangeRestRequest(String encodedSamlResponse,
|
||||
AuthenticateHeaders authenticateHeaders) {
|
||||
return buildTokenExchangeRestRequest(encodedSamlResponse, authenticateHeaders, "/opendistrosecurity/saml/acs");
|
||||
}
|
||||
|
||||
private RestRequest buildTokenExchangeRestRequest(String encodedSamlResponse,
|
||||
AuthenticateHeaders authenticateHeaders, String acsEndpoint) {
|
||||
String authtokenPostJson;
|
||||
|
||||
if (authenticateHeaders != null) {
|
||||
authtokenPostJson = "{\"SAMLResponse\": \"" + encodedSamlResponse + "\", \"RequestId\": \""
|
||||
+ authenticateHeaders.requestId + "\"}";
|
||||
} else {
|
||||
authtokenPostJson = "{\"SAMLResponse\": \"" + encodedSamlResponse
|
||||
+ "\", \"RequestId\": null, \"acsEndpoint\": \"" + acsEndpoint + "\" }";
|
||||
}
|
||||
|
||||
return new FakeRestRequest.Builder().withPath("/_opendistro/_security/api/authtoken").withMethod(Method.POST)
|
||||
.withContent(new BytesArray(authtokenPostJson))
|
||||
.withHeaders(ImmutableMap.of("Content-Type", "application/json")).build();
|
||||
}
|
||||
|
||||
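// Loads the SP signing certificate and private key from saml/spock-keystore.jks (alias "spock", password "changeit").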
private static void initSpSigningKeys() {
|
||||
try {
|
||||
KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
|
||||
|
||||
KeyStore keyStore = KeyStore.getInstance("JKS");
|
||||
InputStream keyStream = new FileInputStream(
|
||||
FileHelper.getAbsoluteFilePathFromClassPath("saml/spock-keystore.jks").toFile());
|
||||
|
||||
keyStore.load(keyStream, "changeit".toCharArray());
|
||||
kmf.init(keyStore, "changeit".toCharArray());
|
||||
|
||||
spSigningCertificate = (X509Certificate) keyStore.getCertificate("spock");
|
||||
|
||||
spSigningPrivateKey = (PrivateKey) keyStore.getKey("spock", "changeit".toCharArray());
|
||||
|
||||
} catch (NoSuchAlgorithmException | KeyStoreException | CertificateException | IOException
|
||||
| UnrecoverableKeyException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
|
||||
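// Minimal RestChannel that simply records the last response sent, so tests can inspect it.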
static class TestRestChannel implements RestChannel {
|
||||
|
||||
final RestRequest restRequest;
|
||||
RestResponse response;
|
||||
|
||||
TestRestChannel(RestRequest restRequest) {
|
||||
this.restRequest = restRequest;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder newBuilder() throws IOException {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder newErrorBuilder() throws IOException {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder newBuilder(XContentType xContentType, boolean useFiltering) throws IOException {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public BytesStreamOutput bytesOutput() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public RestRequest request() {
|
||||
return restRequest;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean detailedErrorsEnabled() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void sendResponse(RestResponse response) {
|
||||
this.response = response;
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
static class AuthenticateHeaders {
|
||||
final String location;
|
||||
final String requestId;
|
||||
|
||||
AuthenticateHeaders(String location, String requestId) {
|
||||
this.location = location;
|
||||
this.requestId = requestId;
|
||||
}
|
||||
}
|
||||
}
|
1122
src/test/java/com/amazon/dlic/auth/http/saml/MockSamlIdpServer.java
Normal file
1122
src/test/java/com/amazon/dlic/auth/http/saml/MockSamlIdpServer.java
Normal file
File diff suppressed because it is too large
@ -0,0 +1,72 @@
|
||||
/*
|
||||
* Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License").
|
||||
* You may not use this file except in compliance with the License.
|
||||
* A copy of the License is located at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* or in the "license" file accompanying this file. This file is distributed
|
||||
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
|
||||
* express or implied. See the License for the specific language governing
|
||||
* permissions and limitations under the License.
|
||||
*/
|
||||
|
||||
package com.amazon.dlic.auth.ldap;
|
||||
|
||||
import org.apache.http.HttpStatus;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Assert;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
import com.amazon.dlic.auth.ldap.srv.EmbeddedLDAPServer;
|
||||
import com.amazon.opendistroforelasticsearch.security.test.DynamicSecurityConfig;
|
||||
import com.amazon.opendistroforelasticsearch.security.test.SingleClusterTest;
|
||||
import com.amazon.opendistroforelasticsearch.security.test.helper.file.FileHelper;
|
||||
import com.amazon.opendistroforelasticsearch.security.test.helper.rest.RestHelper;
|
||||
|
||||
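// Integration test: starts an embedded LDAP server, substitutes the LDAPS port into ldap/config.yml
// and verifies that basic authentication as "jacksonm" succeeds against the cluster.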
public class LdapBackendIntegTest extends SingleClusterTest {
|
||||
|
||||
private static EmbeddedLDAPServer ldapServer = null;
|
||||
|
||||
private static int ldapPort;
|
||||
private static int ldapsPort;
|
||||
|
||||
@BeforeClass
|
||||
public static void startLdapServer() throws Exception {
|
||||
ldapServer = new EmbeddedLDAPServer();
|
||||
ldapServer.start();
|
||||
ldapServer.applyLdif("base.ldif");
|
||||
ldapPort = ldapServer.getLdapPort();
|
||||
ldapsPort = ldapServer.getLdapsPort();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected String getResourceFolder() {
|
||||
return "ldap";
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testIntegLdapAuthenticationSSL() throws Exception {
|
||||
String securityConfigAsYamlString = FileHelper.loadFile("ldap/config.yml");
|
||||
securityConfigAsYamlString = securityConfigAsYamlString.replace("${ldapsPort}", String.valueOf(ldapsPort));
|
||||
System.out.println(securityConfigAsYamlString);
|
||||
setup(Settings.EMPTY, new DynamicSecurityConfig().setConfigAsYamlString(securityConfigAsYamlString), Settings.EMPTY);
|
||||
final RestHelper rh = nonSslRestHelper();
|
||||
Assert.assertEquals(HttpStatus.SC_OK, rh.executeGetRequest("", encodeBasicHeader("jacksonm", "secret")).getStatusCode());
|
||||
}
|
||||
|
||||
|
||||
|
||||
@AfterClass
|
||||
public static void tearDownLdap() throws Exception {
|
||||
|
||||
if (ldapServer != null) {
|
||||
ldapServer.stop();
|
||||
}
|
||||
|
||||
}
|
||||
}
|
Some files were not shown because too many files have changed in this diff