diff --git a/cmoa_install.yaml b/cmoa_install.yaml
index ca5988d..99637aa 100755
--- a/cmoa_install.yaml
+++ b/cmoa_install.yaml
@@ -5,9 +5,9 @@
environment:
KUBECONFIG: /root/.kube/ansible_config
vars:
- REGISTRY: 10.10.43.230:5000
- PullSecret: ""
+ REGISTRY: 10.10.31.243:5000/cmoa3
+ REPO: "nexus" # dockerhub or nexus
roles:
- role: cmoa_os_setting
- - role: cmoa_install
- delegate_to: 127.0.0.1
+ #- role: cmoa_install
+ #delegate_to: 127.0.0.1
diff --git a/collections/ansible_collections/community/general/LICENSES/GPL-3.0-or-later.txt b/collections/ansible_collections/community/general/LICENSES/GPL-3.0-or-later.txt
deleted file mode 120000
index 012065c..0000000
--- a/collections/ansible_collections/community/general/LICENSES/GPL-3.0-or-later.txt
+++ /dev/null
@@ -1 +0,0 @@
-../COPYING
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/LICENSES/GPL-3.0-or-later.txt b/collections/ansible_collections/community/general/LICENSES/GPL-3.0-or-later.txt
new file mode 100644
index 0000000..10926e8
--- /dev/null
+++ b/collections/ansible_collections/community/general/LICENSES/GPL-3.0-or-later.txt
@@ -0,0 +1,675 @@
+ GNU GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users. We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors. You can apply it to
+your programs, too.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights. Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+ For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received. You must make sure that they, too, receive
+or can get the source code. And you must show them these terms so they
+know their rights.
+
+ Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+ For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software. For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+ Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so. This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software. The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable. Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products. If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+ Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary. To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Use with the GNU Affero General Public License.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time. Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+ <program>  Copyright (C) <year>  <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<https://www.gnu.org/philosophy/why-not-lgpl.html>.
+
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/default-common.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/default-common.yml
deleted file mode 120000
index 7ea8984..0000000
--- a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/default-common.yml
+++ /dev/null
@@ -1 +0,0 @@
-../default-common.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/default-common.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/default-common.yml
new file mode 100644
index 0000000..fd874e5
--- /dev/null
+++ b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-001_vars/default-common.yml
@@ -0,0 +1,18 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+list1:
+ - name: foo
+ extra: true
+ - name: bar
+ extra: false
+ - name: meh
+ extra: true
+
+list2:
+ - name: foo
+ path: /foo
+ - name: baz
+ path: /baz
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/default-common.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/default-common.yml
deleted file mode 120000
index 7ea8984..0000000
--- a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/default-common.yml
+++ /dev/null
@@ -1 +0,0 @@
-../default-common.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/default-common.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/default-common.yml
new file mode 100644
index 0000000..fd874e5
--- /dev/null
+++ b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-002_vars/default-common.yml
@@ -0,0 +1,18 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+list1:
+ - name: foo
+ extra: true
+ - name: bar
+ extra: false
+ - name: meh
+ extra: true
+
+list2:
+ - name: foo
+ path: /foo
+ - name: baz
+ path: /baz
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/default-recursive-true.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/default-recursive-true.yml
deleted file mode 120000
index 299736f..0000000
--- a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/default-recursive-true.yml
+++ /dev/null
@@ -1 +0,0 @@
-../default-recursive-true.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/default-recursive-true.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/default-recursive-true.yml
new file mode 100644
index 0000000..133c8f2
--- /dev/null
+++ b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-003_vars/default-recursive-true.yml
@@ -0,0 +1,24 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+list1:
+ - name: myname01
+ param01:
+ x: default_value
+ y: default_value
+ list:
+ - default_value
+ - name: myname02
+ param01: [1, 1, 2, 3]
+
+list2:
+ - name: myname01
+ param01:
+ y: patch_value
+ z: patch_value
+ list:
+ - patch_value
+ - name: myname02
+ param01: [3, 4, 4, {key: value}]
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/default-recursive-true.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/default-recursive-true.yml
deleted file mode 120000
index 299736f..0000000
--- a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/default-recursive-true.yml
+++ /dev/null
@@ -1 +0,0 @@
-../default-recursive-true.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/default-recursive-true.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/default-recursive-true.yml
new file mode 100644
index 0000000..133c8f2
--- /dev/null
+++ b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-004_vars/default-recursive-true.yml
@@ -0,0 +1,24 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+list1:
+ - name: myname01
+ param01:
+ x: default_value
+ y: default_value
+ list:
+ - default_value
+ - name: myname02
+ param01: [1, 1, 2, 3]
+
+list2:
+ - name: myname01
+ param01:
+ y: patch_value
+ z: patch_value
+ list:
+ - patch_value
+ - name: myname02
+ param01: [3, 4, 4, {key: value}]
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/default-recursive-true.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/default-recursive-true.yml
deleted file mode 120000
index 299736f..0000000
--- a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/default-recursive-true.yml
+++ /dev/null
@@ -1 +0,0 @@
-../default-recursive-true.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/default-recursive-true.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/default-recursive-true.yml
new file mode 100644
index 0000000..133c8f2
--- /dev/null
+++ b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-005_vars/default-recursive-true.yml
@@ -0,0 +1,24 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+list1:
+ - name: myname01
+ param01:
+ x: default_value
+ y: default_value
+ list:
+ - default_value
+ - name: myname02
+ param01: [1, 1, 2, 3]
+
+list2:
+ - name: myname01
+ param01:
+ y: patch_value
+ z: patch_value
+ list:
+ - patch_value
+ - name: myname02
+ param01: [3, 4, 4, {key: value}]
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/default-recursive-true.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/default-recursive-true.yml
deleted file mode 120000
index 299736f..0000000
--- a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/default-recursive-true.yml
+++ /dev/null
@@ -1 +0,0 @@
-../default-recursive-true.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/default-recursive-true.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/default-recursive-true.yml
new file mode 100644
index 0000000..133c8f2
--- /dev/null
+++ b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-006_vars/default-recursive-true.yml
@@ -0,0 +1,24 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+list1:
+ - name: myname01
+ param01:
+ x: default_value
+ y: default_value
+ list:
+ - default_value
+ - name: myname02
+ param01: [1, 1, 2, 3]
+
+list2:
+ - name: myname01
+ param01:
+ y: patch_value
+ z: patch_value
+ list:
+ - patch_value
+ - name: myname02
+ param01: [3, 4, 4, {key: value}]
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/default-recursive-true.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/default-recursive-true.yml
deleted file mode 120000
index 299736f..0000000
--- a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/default-recursive-true.yml
+++ /dev/null
@@ -1 +0,0 @@
-../default-recursive-true.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/default-recursive-true.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/default-recursive-true.yml
new file mode 100644
index 0000000..133c8f2
--- /dev/null
+++ b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-007_vars/default-recursive-true.yml
@@ -0,0 +1,24 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+list1:
+ - name: myname01
+ param01:
+ x: default_value
+ y: default_value
+ list:
+ - default_value
+ - name: myname02
+ param01: [1, 1, 2, 3]
+
+list2:
+ - name: myname01
+ param01:
+ y: patch_value
+ z: patch_value
+ list:
+ - patch_value
+ - name: myname02
+ param01: [3, 4, 4, {key: value}]
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/default-recursive-true.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/default-recursive-true.yml
deleted file mode 120000
index 299736f..0000000
--- a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/default-recursive-true.yml
+++ /dev/null
@@ -1 +0,0 @@
-../default-recursive-true.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/default-recursive-true.yml b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/default-recursive-true.yml
new file mode 100644
index 0000000..133c8f2
--- /dev/null
+++ b/collections/ansible_collections/community/general/docs/docsite/helper/lists_mergeby/example-008_vars/default-recursive-true.yml
@@ -0,0 +1,24 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+list1:
+ - name: myname01
+ param01:
+ x: default_value
+ y: default_value
+ list:
+ - default_value
+ - name: myname02
+ param01: [1, 1, 2, 3]
+
+list2:
+ - name: myname01
+ param01:
+ y: patch_value
+ z: patch_value
+ list:
+ - patch_value
+ - name: myname02
+ param01: [3, 4, 4, {key: value}]
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/connection_chroot/runme.sh b/collections/ansible_collections/community/general/tests/integration/targets/connection_chroot/runme.sh
deleted file mode 120000
index 70aa5db..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/connection_chroot/runme.sh
+++ /dev/null
@@ -1 +0,0 @@
-../connection_posix/test.sh
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/connection_chroot/runme.sh b/collections/ansible_collections/community/general/tests/integration/targets/connection_chroot/runme.sh
new file mode 100755
index 0000000..9f31da6
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/connection_chroot/runme.sh
@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+set -eux
+
+# Connection tests for POSIX platforms use this script by linking to it from the appropriate 'connection_' target dir.
+# The name of the inventory group to test is extracted from the directory name following the 'connection_' prefix.
+
+group=$(python -c \
+ "from os import path; print(path.basename(path.abspath(path.dirname('$0'))).replace('connection_', ''))")
+
+cd ../connection
+
+INVENTORY="../connection_${group}/test_connection.inventory" ./test.sh \
+ -e target_hosts="${group}" \
+ -e action_prefix= \
+ -e local_tmp=/tmp/ansible-local \
+ -e remote_tmp=/tmp/ansible-remote \
+ "$@"
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/connection_jail/runme.sh b/collections/ansible_collections/community/general/tests/integration/targets/connection_jail/runme.sh
deleted file mode 120000
index 70aa5db..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/connection_jail/runme.sh
+++ /dev/null
@@ -1 +0,0 @@
-../connection_posix/test.sh
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/connection_jail/runme.sh b/collections/ansible_collections/community/general/tests/integration/targets/connection_jail/runme.sh
new file mode 100755
index 0000000..9f31da6
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/connection_jail/runme.sh
@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+set -eux
+
+# Connection tests for POSIX platforms use this script by linking to it from the appropriate 'connection_' target dir.
+# The name of the inventory group to test is extracted from the directory name following the 'connection_' prefix.
+
+group=$(python -c \
+ "from os import path; print(path.basename(path.abspath(path.dirname('$0'))).replace('connection_', ''))")
+
+cd ../connection
+
+INVENTORY="../connection_${group}/test_connection.inventory" ./test.sh \
+ -e target_hosts="${group}" \
+ -e action_prefix= \
+ -e local_tmp=/tmp/ansible-local \
+ -e remote_tmp=/tmp/ansible-remote \
+ "$@"
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/connection_lxc/runme.sh b/collections/ansible_collections/community/general/tests/integration/targets/connection_lxc/runme.sh
deleted file mode 120000
index 70aa5db..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/connection_lxc/runme.sh
+++ /dev/null
@@ -1 +0,0 @@
-../connection_posix/test.sh
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/connection_lxc/runme.sh b/collections/ansible_collections/community/general/tests/integration/targets/connection_lxc/runme.sh
new file mode 100755
index 0000000..9f31da6
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/connection_lxc/runme.sh
@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+set -eux
+
+# Connection tests for POSIX platforms use this script by linking to it from the appropriate 'connection_' target dir.
+# The name of the inventory group to test is extracted from the directory name following the 'connection_' prefix.
+
+group=$(python -c \
+ "from os import path; print(path.basename(path.abspath(path.dirname('$0'))).replace('connection_', ''))")
+
+cd ../connection
+
+INVENTORY="../connection_${group}/test_connection.inventory" ./test.sh \
+ -e target_hosts="${group}" \
+ -e action_prefix= \
+ -e local_tmp=/tmp/ansible-local \
+ -e remote_tmp=/tmp/ansible-remote \
+ "$@"
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/connection_lxd/runme.sh b/collections/ansible_collections/community/general/tests/integration/targets/connection_lxd/runme.sh
deleted file mode 120000
index 70aa5db..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/connection_lxd/runme.sh
+++ /dev/null
@@ -1 +0,0 @@
-../connection_posix/test.sh
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/connection_lxd/runme.sh b/collections/ansible_collections/community/general/tests/integration/targets/connection_lxd/runme.sh
new file mode 100755
index 0000000..9f31da6
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/connection_lxd/runme.sh
@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+set -eux
+
+# Connection tests for POSIX platforms use this script by linking to it from the appropriate 'connection_' target dir.
+# The name of the inventory group to test is extracted from the directory name following the 'connection_' prefix.
+
+group=$(python -c \
+ "from os import path; print(path.basename(path.abspath(path.dirname('$0'))).replace('connection_', ''))")
+
+cd ../connection
+
+INVENTORY="../connection_${group}/test_connection.inventory" ./test.sh \
+ -e target_hosts="${group}" \
+ -e action_prefix= \
+ -e local_tmp=/tmp/ansible-local \
+ -e remote_tmp=/tmp/ansible-remote \
+ "$@"
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-Fedora.yml b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-Fedora.yml
deleted file mode 120000
index 0abaec1..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-Fedora.yml
+++ /dev/null
@@ -1 +0,0 @@
-default.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-Fedora.yml b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-Fedora.yml
new file mode 100644
index 0000000..391ae86
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-Fedora.yml
@@ -0,0 +1,25 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: Install snapd
+ package:
+ name: "{{ snap_packages }}"
+ state: present
+ notify: Remove snapd
+
+- name: Make sure that snapd is running
+ service:
+ name: snapd
+ state: started
+
+- name: Create link /snap
+ file:
+ src: /var/lib/snapd/snap
+ dest: /snap
+ state: link
+
+- name: Inform that snap is installed
+ set_fact:
+ has_snap: true
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-8.2.yml b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-8.2.yml
deleted file mode 120000
index 0b06951..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-8.2.yml
+++ /dev/null
@@ -1 +0,0 @@
-nothing.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-8.2.yml b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-8.2.yml
new file mode 100644
index 0000000..5bbfaff
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-8.2.yml
@@ -0,0 +1,6 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Do nothing
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-8.3.yml b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-8.3.yml
deleted file mode 120000
index 0b06951..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-8.3.yml
+++ /dev/null
@@ -1 +0,0 @@
-nothing.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-8.3.yml b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-8.3.yml
new file mode 100644
index 0000000..5bbfaff
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-8.3.yml
@@ -0,0 +1,6 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Do nothing
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-9.0.yml b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-9.0.yml
deleted file mode 120000
index 0b06951..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-9.0.yml
+++ /dev/null
@@ -1 +0,0 @@
-nothing.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-9.0.yml b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-9.0.yml
new file mode 100644
index 0000000..5bbfaff
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/D-RedHat-9.0.yml
@@ -0,0 +1,6 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Do nothing
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/Debian.yml b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/Debian.yml
deleted file mode 120000
index 0abaec1..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/Debian.yml
+++ /dev/null
@@ -1 +0,0 @@
-default.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/Debian.yml b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/Debian.yml
new file mode 100644
index 0000000..391ae86
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/Debian.yml
@@ -0,0 +1,25 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: Install snapd
+ package:
+ name: "{{ snap_packages }}"
+ state: present
+ notify: Remove snapd
+
+- name: Make sure that snapd is running
+ service:
+ name: snapd
+ state: started
+
+- name: Create link /snap
+ file:
+ src: /var/lib/snapd/snap
+ dest: /snap
+ state: link
+
+- name: Inform that snap is installed
+ set_fact:
+ has_snap: true
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/RedHat.yml b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/RedHat.yml
deleted file mode 120000
index 0abaec1..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/RedHat.yml
+++ /dev/null
@@ -1 +0,0 @@
-default.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/RedHat.yml b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/RedHat.yml
new file mode 100644
index 0000000..391ae86
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/setup_snap/tasks/RedHat.yml
@@ -0,0 +1,25 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: Install snapd
+ package:
+ name: "{{ snap_packages }}"
+ state: present
+ notify: Remove snapd
+
+- name: Make sure that snapd is running
+ service:
+ name: snapd
+ state: started
+
+- name: Create link /snap
+ file:
+ src: /var/lib/snapd/snap
+ dest: /snap
+ state: link
+
+- name: Inform that snap is installed
+ set_fact:
+ has_snap: true
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_Darwin.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_Darwin.yml
deleted file mode 120000
index 7fe8a88..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_Darwin.yml
+++ /dev/null
@@ -1 +0,0 @@
-install_pip.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_Darwin.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_Darwin.yml
new file mode 100644
index 0000000..b1d3bd7
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_Darwin.yml
@@ -0,0 +1,9 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: install supervisord
+ pip:
+ name: supervisor<4.0.0 # supervisor version 4.0.0 fails tests
+ state: present
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_FreeBSD.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_FreeBSD.yml
deleted file mode 120000
index 7fe8a88..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_FreeBSD.yml
+++ /dev/null
@@ -1 +0,0 @@
-install_pip.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_FreeBSD.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_FreeBSD.yml
new file mode 100644
index 0000000..b1d3bd7
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_FreeBSD.yml
@@ -0,0 +1,9 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: install supervisord
+ pip:
+ name: supervisor<4.0.0 # supervisor version 4.0.0 fails tests
+ state: present
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_RedHat.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_RedHat.yml
deleted file mode 120000
index 7fe8a88..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_RedHat.yml
+++ /dev/null
@@ -1 +0,0 @@
-install_pip.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_RedHat.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_RedHat.yml
new file mode 100644
index 0000000..b1d3bd7
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_RedHat.yml
@@ -0,0 +1,9 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: install supervisord
+ pip:
+ name: supervisor<4.0.0 # supervisor version 4.0.0 fails tests
+ state: present
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_Suse.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_Suse.yml
deleted file mode 120000
index 7fe8a88..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_Suse.yml
+++ /dev/null
@@ -1 +0,0 @@
-install_pip.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_Suse.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_Suse.yml
new file mode 100644
index 0000000..b1d3bd7
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/install_Suse.yml
@@ -0,0 +1,9 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: install supervisord
+ pip:
+ name: supervisor<4.0.0 # supervisor version 4.0.0 fails tests
+ state: present
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_Darwin.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_Darwin.yml
deleted file mode 120000
index 577ca5a..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_Darwin.yml
+++ /dev/null
@@ -1 +0,0 @@
-uninstall_pip.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_Darwin.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_Darwin.yml
new file mode 100644
index 0000000..cf339df
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_Darwin.yml
@@ -0,0 +1,9 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: uninstall supervisord
+ pip:
+ name: supervisor
+ state: absent
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_FreeBSD.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_FreeBSD.yml
deleted file mode 120000
index 577ca5a..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_FreeBSD.yml
+++ /dev/null
@@ -1 +0,0 @@
-uninstall_pip.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_FreeBSD.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_FreeBSD.yml
new file mode 100644
index 0000000..cf339df
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_FreeBSD.yml
@@ -0,0 +1,9 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: uninstall supervisord
+ pip:
+ name: supervisor
+ state: absent
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_RedHat.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_RedHat.yml
deleted file mode 120000
index 577ca5a..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_RedHat.yml
+++ /dev/null
@@ -1 +0,0 @@
-uninstall_pip.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_RedHat.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_RedHat.yml
new file mode 100644
index 0000000..cf339df
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_RedHat.yml
@@ -0,0 +1,9 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: uninstall supervisord
+ pip:
+ name: supervisor
+ state: absent
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_Suse.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_Suse.yml
deleted file mode 120000
index 577ca5a..0000000
--- a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_Suse.yml
+++ /dev/null
@@ -1 +0,0 @@
-uninstall_pip.yml
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_Suse.yml b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_Suse.yml
new file mode 100644
index 0000000..cf339df
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/integration/targets/supervisorctl/tasks/uninstall_Suse.yml
@@ -0,0 +1,9 @@
+---
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+- name: uninstall supervisord
+ pip:
+ name: supervisor
+ state: absent
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/aix.sh b/collections/ansible_collections/community/general/tests/utils/shippable/aix.sh
deleted file mode 120000
index 6ddb776..0000000
--- a/collections/ansible_collections/community/general/tests/utils/shippable/aix.sh
+++ /dev/null
@@ -1 +0,0 @@
-remote.sh
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/aix.sh b/collections/ansible_collections/community/general/tests/utils/shippable/aix.sh
new file mode 100755
index 0000000..84c1ebb
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/utils/shippable/aix.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+set -o pipefail -eux
+
+declare -a args
+IFS='/:' read -ra args <<< "$1"
+
+platform="${args[0]}"
+version="${args[1]}"
+
+if [ "${#args[@]}" -gt 2 ]; then
+ target="azp/posix/${args[2]}/"
+else
+ target="azp/posix/"
+fi
+
+stage="${S:-prod}"
+provider="${P:-default}"
+
+if [ "${platform}" == "rhel" ] && [[ "${version}" =~ ^8 ]]; then
+ echo "pynacl >= 1.4.0, < 1.5.0; python_version == '3.6'" >> tests/utils/constraints.txt
+fi
+
+# shellcheck disable=SC2086
+ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
+ --remote "${platform}/${version}" --remote-terminate always --remote-stage "${stage}" --remote-provider "${provider}"
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/alpine.sh b/collections/ansible_collections/community/general/tests/utils/shippable/alpine.sh
deleted file mode 120000
index 6ddb776..0000000
--- a/collections/ansible_collections/community/general/tests/utils/shippable/alpine.sh
+++ /dev/null
@@ -1 +0,0 @@
-remote.sh
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/alpine.sh b/collections/ansible_collections/community/general/tests/utils/shippable/alpine.sh
new file mode 100755
index 0000000..84c1ebb
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/utils/shippable/alpine.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+set -o pipefail -eux
+
+declare -a args
+IFS='/:' read -ra args <<< "$1"
+
+platform="${args[0]}"
+version="${args[1]}"
+
+if [ "${#args[@]}" -gt 2 ]; then
+ target="azp/posix/${args[2]}/"
+else
+ target="azp/posix/"
+fi
+
+stage="${S:-prod}"
+provider="${P:-default}"
+
+if [ "${platform}" == "rhel" ] && [[ "${version}" =~ ^8 ]]; then
+ echo "pynacl >= 1.4.0, < 1.5.0; python_version == '3.6'" >> tests/utils/constraints.txt
+fi
+
+# shellcheck disable=SC2086
+ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
+ --remote "${platform}/${version}" --remote-terminate always --remote-stage "${stage}" --remote-provider "${provider}"
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/fedora.sh b/collections/ansible_collections/community/general/tests/utils/shippable/fedora.sh
deleted file mode 120000
index 6ddb776..0000000
--- a/collections/ansible_collections/community/general/tests/utils/shippable/fedora.sh
+++ /dev/null
@@ -1 +0,0 @@
-remote.sh
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/fedora.sh b/collections/ansible_collections/community/general/tests/utils/shippable/fedora.sh
new file mode 100755
index 0000000..84c1ebb
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/utils/shippable/fedora.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+set -o pipefail -eux
+
+declare -a args
+IFS='/:' read -ra args <<< "$1"
+
+platform="${args[0]}"
+version="${args[1]}"
+
+if [ "${#args[@]}" -gt 2 ]; then
+ target="azp/posix/${args[2]}/"
+else
+ target="azp/posix/"
+fi
+
+stage="${S:-prod}"
+provider="${P:-default}"
+
+if [ "${platform}" == "rhel" ] && [[ "${version}" =~ ^8 ]]; then
+ echo "pynacl >= 1.4.0, < 1.5.0; python_version == '3.6'" >> tests/utils/constraints.txt
+fi
+
+# shellcheck disable=SC2086
+ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
+ --remote "${platform}/${version}" --remote-terminate always --remote-stage "${stage}" --remote-provider "${provider}"
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/freebsd.sh b/collections/ansible_collections/community/general/tests/utils/shippable/freebsd.sh
deleted file mode 120000
index 6ddb776..0000000
--- a/collections/ansible_collections/community/general/tests/utils/shippable/freebsd.sh
+++ /dev/null
@@ -1 +0,0 @@
-remote.sh
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/freebsd.sh b/collections/ansible_collections/community/general/tests/utils/shippable/freebsd.sh
new file mode 100755
index 0000000..84c1ebb
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/utils/shippable/freebsd.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+set -o pipefail -eux
+
+declare -a args
+IFS='/:' read -ra args <<< "$1"
+
+platform="${args[0]}"
+version="${args[1]}"
+
+if [ "${#args[@]}" -gt 2 ]; then
+ target="azp/posix/${args[2]}/"
+else
+ target="azp/posix/"
+fi
+
+stage="${S:-prod}"
+provider="${P:-default}"
+
+if [ "${platform}" == "rhel" ] && [[ "${version}" =~ ^8 ]]; then
+ echo "pynacl >= 1.4.0, < 1.5.0; python_version == '3.6'" >> tests/utils/constraints.txt
+fi
+
+# shellcheck disable=SC2086
+ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
+ --remote "${platform}/${version}" --remote-terminate always --remote-stage "${stage}" --remote-provider "${provider}"
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/macos.sh b/collections/ansible_collections/community/general/tests/utils/shippable/macos.sh
deleted file mode 120000
index 6ddb776..0000000
--- a/collections/ansible_collections/community/general/tests/utils/shippable/macos.sh
+++ /dev/null
@@ -1 +0,0 @@
-remote.sh
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/macos.sh b/collections/ansible_collections/community/general/tests/utils/shippable/macos.sh
new file mode 100755
index 0000000..84c1ebb
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/utils/shippable/macos.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+set -o pipefail -eux
+
+declare -a args
+IFS='/:' read -ra args <<< "$1"
+
+platform="${args[0]}"
+version="${args[1]}"
+
+if [ "${#args[@]}" -gt 2 ]; then
+ target="azp/posix/${args[2]}/"
+else
+ target="azp/posix/"
+fi
+
+stage="${S:-prod}"
+provider="${P:-default}"
+
+if [ "${platform}" == "rhel" ] && [[ "${version}" =~ ^8 ]]; then
+ echo "pynacl >= 1.4.0, < 1.5.0; python_version == '3.6'" >> tests/utils/constraints.txt
+fi
+
+# shellcheck disable=SC2086
+ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
+ --remote "${platform}/${version}" --remote-terminate always --remote-stage "${stage}" --remote-provider "${provider}"
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/osx.sh b/collections/ansible_collections/community/general/tests/utils/shippable/osx.sh
deleted file mode 120000
index 6ddb776..0000000
--- a/collections/ansible_collections/community/general/tests/utils/shippable/osx.sh
+++ /dev/null
@@ -1 +0,0 @@
-remote.sh
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/osx.sh b/collections/ansible_collections/community/general/tests/utils/shippable/osx.sh
new file mode 100755
index 0000000..84c1ebb
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/utils/shippable/osx.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+set -o pipefail -eux
+
+declare -a args
+IFS='/:' read -ra args <<< "$1"
+
+platform="${args[0]}"
+version="${args[1]}"
+
+if [ "${#args[@]}" -gt 2 ]; then
+ target="azp/posix/${args[2]}/"
+else
+ target="azp/posix/"
+fi
+
+stage="${S:-prod}"
+provider="${P:-default}"
+
+if [ "${platform}" == "rhel" ] && [[ "${version}" =~ ^8 ]]; then
+ echo "pynacl >= 1.4.0, < 1.5.0; python_version == '3.6'" >> tests/utils/constraints.txt
+fi
+
+# shellcheck disable=SC2086
+ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
+ --remote "${platform}/${version}" --remote-terminate always --remote-stage "${stage}" --remote-provider "${provider}"
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/rhel.sh b/collections/ansible_collections/community/general/tests/utils/shippable/rhel.sh
deleted file mode 120000
index 6ddb776..0000000
--- a/collections/ansible_collections/community/general/tests/utils/shippable/rhel.sh
+++ /dev/null
@@ -1 +0,0 @@
-remote.sh
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/rhel.sh b/collections/ansible_collections/community/general/tests/utils/shippable/rhel.sh
new file mode 100755
index 0000000..84c1ebb
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/utils/shippable/rhel.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+set -o pipefail -eux
+
+declare -a args
+IFS='/:' read -ra args <<< "$1"
+
+platform="${args[0]}"
+version="${args[1]}"
+
+if [ "${#args[@]}" -gt 2 ]; then
+ target="azp/posix/${args[2]}/"
+else
+ target="azp/posix/"
+fi
+
+stage="${S:-prod}"
+provider="${P:-default}"
+
+if [ "${platform}" == "rhel" ] && [[ "${version}" =~ ^8 ]]; then
+ echo "pynacl >= 1.4.0, < 1.5.0; python_version == '3.6'" >> tests/utils/constraints.txt
+fi
+
+# shellcheck disable=SC2086
+ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
+ --remote "${platform}/${version}" --remote-terminate always --remote-stage "${stage}" --remote-provider "${provider}"
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/ubuntu.sh b/collections/ansible_collections/community/general/tests/utils/shippable/ubuntu.sh
deleted file mode 120000
index 6ddb776..0000000
--- a/collections/ansible_collections/community/general/tests/utils/shippable/ubuntu.sh
+++ /dev/null
@@ -1 +0,0 @@
-remote.sh
\ No newline at end of file
diff --git a/collections/ansible_collections/community/general/tests/utils/shippable/ubuntu.sh b/collections/ansible_collections/community/general/tests/utils/shippable/ubuntu.sh
new file mode 100755
index 0000000..84c1ebb
--- /dev/null
+++ b/collections/ansible_collections/community/general/tests/utils/shippable/ubuntu.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+# Copyright (c) Ansible Project
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+set -o pipefail -eux
+
+declare -a args
+IFS='/:' read -ra args <<< "$1"
+
+platform="${args[0]}"
+version="${args[1]}"
+
+if [ "${#args[@]}" -gt 2 ]; then
+ target="azp/posix/${args[2]}/"
+else
+ target="azp/posix/"
+fi
+
+stage="${S:-prod}"
+provider="${P:-default}"
+
+if [ "${platform}" == "rhel" ] && [[ "${version}" =~ ^8 ]]; then
+ echo "pynacl >= 1.4.0, < 1.5.0; python_version == '3.6'" >> tests/utils/constraints.txt
+fi
+
+# shellcheck disable=SC2086
+ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} ${UNSTABLE:+"$UNSTABLE"} \
+ --remote "${platform}/${version}" --remote-terminate always --remote-stage "${stage}" --remote-provider "${provider}"
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/helm.py b/collections/ansible_collections/kubernetes/core/plugins/action/helm.py
deleted file mode 120000
index 667980e..0000000
--- a/collections/ansible_collections/kubernetes/core/plugins/action/helm.py
+++ /dev/null
@@ -1 +0,0 @@
-k8s_info.py
\ No newline at end of file
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/helm.py b/collections/ansible_collections/kubernetes/core/plugins/action/helm.py
new file mode 100644
index 0000000..cfcbb28
--- /dev/null
+++ b/collections/ansible_collections/kubernetes/core/plugins/action/helm.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2012-2014, Michael DeHaan
+# Copyright (c) 2017, Toshio Kuratomi
+# Copyright (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+import traceback
+import os
+from contextlib import contextmanager
+
+from ansible.config.manager import ensure_type
+from ansible.errors import (
+ AnsibleError,
+ AnsibleFileNotFound,
+ AnsibleAction,
+ AnsibleActionFail,
+)
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.module_utils.six import string_types, iteritems
+from ansible.module_utils._text import to_text, to_bytes, to_native
+from ansible.plugins.action import ActionBase
+
+
+class RemoveOmit(object):
+ def __init__(self, buffer, omit_value):
+ try:
+ import yaml
+ except ImportError:
+ raise AnsibleError("Failed to import the required Python library (PyYAML).")
+ self.data = yaml.safe_load_all(buffer)
+ self.omit = omit_value
+
+ def remove_omit(self, data):
+ if isinstance(data, dict):
+ result = dict()
+ for key, value in iteritems(data):
+ if value == self.omit:
+ continue
+ result[key] = self.remove_omit(value)
+ return result
+ if isinstance(data, list):
+ return [self.remove_omit(v) for v in data if v != self.omit]
+ return data
+
+ def output(self):
+ return [self.remove_omit(d) for d in self.data]
+
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = True
+ DEFAULT_NEWLINE_SEQUENCE = "\n"
+
+ def _ensure_invocation(self, result):
+ # NOTE: adding invocation arguments here needs to be kept in sync with
+ # any no_log specified in the argument_spec in the module.
+ if "invocation" not in result:
+ if self._play_context.no_log:
+ result["invocation"] = "CENSORED: no_log is set"
+ else:
+ result["invocation"] = self._task.args.copy()
+ result["invocation"]["module_args"] = self._task.args.copy()
+
+ return result
+
+ @contextmanager
+ def get_template_data(self, template_path):
+ try:
+ source = self._find_needle("templates", template_path)
+ except AnsibleError as e:
+ raise AnsibleActionFail(to_text(e))
+
+ # Get vault decrypted tmp file
+ try:
+ tmp_source = self._loader.get_real_file(source)
+ except AnsibleFileNotFound as e:
+ raise AnsibleActionFail(
+ "could not find template=%s, %s" % (source, to_text(e))
+ )
+ b_tmp_source = to_bytes(tmp_source, errors="surrogate_or_strict")
+
+ try:
+ with open(b_tmp_source, "rb") as f:
+ try:
+ template_data = to_text(f.read(), errors="surrogate_or_strict")
+ except UnicodeError:
+ raise AnsibleActionFail(
+ "Template source files must be utf-8 encoded"
+ )
+ yield template_data
+ except AnsibleAction:
+ raise
+ except Exception as e:
+ raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
+ finally:
+ self._loader.cleanup_tmp_file(b_tmp_source)
+
+ def get_template_args(self, template):
+ template_param = {
+ "newline_sequence": self.DEFAULT_NEWLINE_SEQUENCE,
+ "variable_start_string": None,
+ "variable_end_string": None,
+ "block_start_string": None,
+ "block_end_string": None,
+ "trim_blocks": True,
+ "lstrip_blocks": False,
+ }
+ if isinstance(template, string_types):
+ # treat this as raw_params
+ template_param["path"] = template
+ elif isinstance(template, dict):
+ template_args = template
+ template_path = template_args.get("path", None)
+ if not template_path:
+ raise AnsibleActionFail("Please specify path for template.")
+ template_param["path"] = template_path
+
+ # Options type validation strings
+ for s_type in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ ):
+ if s_type in template_args:
+ value = ensure_type(template_args[s_type], "string")
+ if value is not None and not isinstance(value, string_types):
+ raise AnsibleActionFail(
+ "%s is expected to be a string, but got %s instead"
+ % (s_type, type(value))
+ )
+ try:
+ template_param.update(
+ {
+ "trim_blocks": boolean(
+ template_args.get("trim_blocks", True), strict=False
+ ),
+ "lstrip_blocks": boolean(
+ template_args.get("lstrip_blocks", False), strict=False
+ ),
+ }
+ )
+ except TypeError as e:
+ raise AnsibleActionFail(to_native(e))
+
+ template_param.update(
+ {
+ "newline_sequence": template_args.get(
+ "newline_sequence", self.DEFAULT_NEWLINE_SEQUENCE
+ ),
+ "variable_start_string": template_args.get(
+ "variable_start_string", None
+ ),
+ "variable_end_string": template_args.get(
+ "variable_end_string", None
+ ),
+ "block_start_string": template_args.get("block_start_string", None),
+ "block_end_string": template_args.get("block_end_string", None),
+ }
+ )
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+ return template_param
+
+ def import_jinja2_lstrip(self, templates):
+ # Option `lstrip_blocks' was added in Jinja2 version 2.7.
+ if any(tmp["lstrip_blocks"] for tmp in templates):
+ try:
+ import jinja2.defaults
+ except ImportError:
+ raise AnsibleError(
+ "Unable to import Jinja2 defaults for determining Jinja2 features."
+ )
+
+ try:
+ jinja2.defaults.LSTRIP_BLOCKS
+ except AttributeError:
+ raise AnsibleError(
+ "Option `lstrip_blocks' is only available in Jinja2 versions >=2.7"
+ )
+
+ def load_template(self, template, new_module_args, task_vars):
+ # template is only supported by k8s module.
+ if self._task.action not in (
+ "k8s",
+ "kubernetes.core.k8s",
+ "community.okd.k8s",
+ "redhat.openshift.k8s",
+ "community.kubernetes.k8s",
+ "openshift_adm_groups_sync",
+ "community.okd.openshift_adm_groups_sync",
+ "redhat.openshift.openshift_adm_groups_sync",
+ ):
+ raise AnsibleActionFail(
+ "'template' is only a supported parameter for the 'k8s' module."
+ )
+
+ omit_value = task_vars.get("omit")
+ template_params = []
+ if isinstance(template, string_types) or isinstance(template, dict):
+ template_params.append(self.get_template_args(template))
+ elif isinstance(template, list):
+ for element in template:
+ template_params.append(self.get_template_args(element))
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+
+ self.import_jinja2_lstrip(template_params)
+
+ wrong_sequences = ["\\n", "\\r", "\\r\\n"]
+ allowed_sequences = ["\n", "\r", "\r\n"]
+
+ result_template = []
+ old_vars = self._templar.available_variables
+
+ default_environment = {}
+ for key in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ "trim_blocks",
+ "lstrip_blocks",
+ ):
+ if hasattr(self._templar.environment, key):
+ default_environment[key] = getattr(self._templar.environment, key)
+ for template_item in template_params:
+ # We need to convert unescaped sequences to proper escaped sequences for Jinja2
+ newline_sequence = template_item["newline_sequence"]
+ if newline_sequence in wrong_sequences:
+ template_item["newline_sequence"] = allowed_sequences[
+ wrong_sequences.index(newline_sequence)
+ ]
+ elif newline_sequence not in allowed_sequences:
+ raise AnsibleActionFail(
+ "newline_sequence needs to be one of: \n, \r or \r\n"
+ )
+
+ # template the source data locally & get ready to transfer
+ with self.get_template_data(template_item["path"]) as template_data:
+ # add ansible 'template' vars
+ temp_vars = copy.deepcopy(task_vars)
+ for key, value in iteritems(template_item):
+ if hasattr(self._templar.environment, key):
+ if value is not None:
+ setattr(self._templar.environment, key, value)
+ else:
+ setattr(
+ self._templar.environment,
+ key,
+ default_environment.get(key),
+ )
+ self._templar.available_variables = temp_vars
+ result = self._templar.do_template(
+ template_data,
+ preserve_trailing_newlines=True,
+ escape_backslashes=False,
+ )
+ if omit_value is not None:
+ result_template.extend(RemoveOmit(result, omit_value).output())
+ else:
+ result_template.append(result)
+ self._templar.available_variables = old_vars
+ resource_definition = self._task.args.get("definition", None)
+ if not resource_definition:
+ new_module_args.pop("template")
+ new_module_args["definition"] = result_template
+
+ def get_file_realpath(self, local_path):
+ # local_path is only supported by k8s_cp module.
+ if self._task.action not in (
+ "k8s_cp",
+ "kubernetes.core.k8s_cp",
+ "community.kubernetes.k8s_cp",
+ ):
+ raise AnsibleActionFail(
+ "'local_path' is only supported parameter for 'k8s_cp' module."
+ )
+
+ if os.path.exists(local_path):
+ return local_path
+
+ try:
+ # find in expected paths
+ return self._find_needle("files", local_path)
+ except AnsibleError:
+ raise AnsibleActionFail(
+ "%s does not exist in local filesystem" % local_path
+ )
+
+ def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
+ if isinstance(kubeconfig, string_types):
+ # find the kubeconfig in the expected search path
+ if not remote_transport:
+ # kubeconfig is local
+ # find in expected paths
+ kubeconfig = self._find_needle("files", kubeconfig)
+
+ # decrypt kubeconfig found
+ actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
+ new_module_args["kubeconfig"] = actual_file
+
+ elif isinstance(kubeconfig, dict):
+ new_module_args["kubeconfig"] = kubeconfig
+ else:
+ raise AnsibleActionFail(
+ "Error while reading kubeconfig parameter - "
+ "a string or dict expected, but got %s instead" % type(kubeconfig)
+ )
+
+ def run(self, tmp=None, task_vars=None):
+ """handler for k8s options"""
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+ # Check current transport connection and depending upon
+ # look for kubeconfig and src
+ # 'local' => look files on Ansible Controller
+ # Transport other than 'local' => look files on remote node
+ remote_transport = self._connection.transport != "local"
+
+ new_module_args = copy.deepcopy(self._task.args)
+
+ kubeconfig = self._task.args.get("kubeconfig", None)
+ if kubeconfig:
+ try:
+ self.get_kubeconfig(kubeconfig, remote_transport, new_module_args)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ # find the file in the expected search path
+ src = self._task.args.get("src", None)
+
+ if src:
+ if remote_transport:
+ # src is on remote node
+ result.update(
+ self._execute_module(
+ module_name=self._task.action, task_vars=task_vars
+ )
+ )
+ return self._ensure_invocation(result)
+
+ # src is local
+ try:
+ # find in expected paths
+ src = self._find_needle("files", src)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ if src:
+ new_module_args["src"] = src
+
+ template = self._task.args.get("template", None)
+ if template:
+ self.load_template(template, new_module_args, task_vars)
+
+ local_path = self._task.args.get("local_path")
+ state = self._task.args.get("state", None)
+ if local_path and state == "to_pod" and not remote_transport:
+ new_module_args["local_path"] = self.get_file_realpath(local_path)
+
+ # Execute the k8s_* module.
+ module_return = self._execute_module(
+ module_name=self._task.action,
+ module_args=new_module_args,
+ task_vars=task_vars,
+ )
+
+ # Delete tmp path
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+
+ result.update(module_return)
+
+ return self._ensure_invocation(result)
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/helm_info.py b/collections/ansible_collections/kubernetes/core/plugins/action/helm_info.py
deleted file mode 120000
index 667980e..0000000
--- a/collections/ansible_collections/kubernetes/core/plugins/action/helm_info.py
+++ /dev/null
@@ -1 +0,0 @@
-k8s_info.py
\ No newline at end of file
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/helm_info.py b/collections/ansible_collections/kubernetes/core/plugins/action/helm_info.py
new file mode 100644
index 0000000..cfcbb28
--- /dev/null
+++ b/collections/ansible_collections/kubernetes/core/plugins/action/helm_info.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2012-2014, Michael DeHaan
+# Copyright (c) 2017, Toshio Kuratomi
+# Copyright (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+import traceback
+import os
+from contextlib import contextmanager
+
+from ansible.config.manager import ensure_type
+from ansible.errors import (
+ AnsibleError,
+ AnsibleFileNotFound,
+ AnsibleAction,
+ AnsibleActionFail,
+)
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.module_utils.six import string_types, iteritems
+from ansible.module_utils._text import to_text, to_bytes, to_native
+from ansible.plugins.action import ActionBase
+
+
+class RemoveOmit(object):
+ def __init__(self, buffer, omit_value):
+ try:
+ import yaml
+ except ImportError:
+ raise AnsibleError("Failed to import the required Python library (PyYAML).")
+ self.data = yaml.safe_load_all(buffer)
+ self.omit = omit_value
+
+ def remove_omit(self, data):
+ if isinstance(data, dict):
+ result = dict()
+ for key, value in iteritems(data):
+ if value == self.omit:
+ continue
+ result[key] = self.remove_omit(value)
+ return result
+ if isinstance(data, list):
+ return [self.remove_omit(v) for v in data if v != self.omit]
+ return data
+
+ def output(self):
+ return [self.remove_omit(d) for d in self.data]
+
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = True
+ DEFAULT_NEWLINE_SEQUENCE = "\n"
+
+ def _ensure_invocation(self, result):
+ # NOTE: adding invocation arguments here needs to be kept in sync with
+ # any no_log specified in the argument_spec in the module.
+ if "invocation" not in result:
+ if self._play_context.no_log:
+ result["invocation"] = "CENSORED: no_log is set"
+ else:
+ result["invocation"] = self._task.args.copy()
+ result["invocation"]["module_args"] = self._task.args.copy()
+
+ return result
+
+ @contextmanager
+ def get_template_data(self, template_path):
+ try:
+ source = self._find_needle("templates", template_path)
+ except AnsibleError as e:
+ raise AnsibleActionFail(to_text(e))
+
+ # Get vault decrypted tmp file
+ try:
+ tmp_source = self._loader.get_real_file(source)
+ except AnsibleFileNotFound as e:
+ raise AnsibleActionFail(
+ "could not find template=%s, %s" % (source, to_text(e))
+ )
+ b_tmp_source = to_bytes(tmp_source, errors="surrogate_or_strict")
+
+ try:
+ with open(b_tmp_source, "rb") as f:
+ try:
+ template_data = to_text(f.read(), errors="surrogate_or_strict")
+ except UnicodeError:
+ raise AnsibleActionFail(
+ "Template source files must be utf-8 encoded"
+ )
+ yield template_data
+ except AnsibleAction:
+ raise
+ except Exception as e:
+ raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
+ finally:
+ self._loader.cleanup_tmp_file(b_tmp_source)
+
+ def get_template_args(self, template):
+ template_param = {
+ "newline_sequence": self.DEFAULT_NEWLINE_SEQUENCE,
+ "variable_start_string": None,
+ "variable_end_string": None,
+ "block_start_string": None,
+ "block_end_string": None,
+ "trim_blocks": True,
+ "lstrip_blocks": False,
+ }
+ if isinstance(template, string_types):
+ # treat this as raw_params
+ template_param["path"] = template
+ elif isinstance(template, dict):
+ template_args = template
+ template_path = template_args.get("path", None)
+ if not template_path:
+ raise AnsibleActionFail("Please specify path for template.")
+ template_param["path"] = template_path
+
+ # Options type validation strings
+ for s_type in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ ):
+ if s_type in template_args:
+ value = ensure_type(template_args[s_type], "string")
+ if value is not None and not isinstance(value, string_types):
+ raise AnsibleActionFail(
+ "%s is expected to be a string, but got %s instead"
+ % (s_type, type(value))
+ )
+ try:
+ template_param.update(
+ {
+ "trim_blocks": boolean(
+ template_args.get("trim_blocks", True), strict=False
+ ),
+ "lstrip_blocks": boolean(
+ template_args.get("lstrip_blocks", False), strict=False
+ ),
+ }
+ )
+ except TypeError as e:
+ raise AnsibleActionFail(to_native(e))
+
+ template_param.update(
+ {
+ "newline_sequence": template_args.get(
+ "newline_sequence", self.DEFAULT_NEWLINE_SEQUENCE
+ ),
+ "variable_start_string": template_args.get(
+ "variable_start_string", None
+ ),
+ "variable_end_string": template_args.get(
+ "variable_end_string", None
+ ),
+ "block_start_string": template_args.get("block_start_string", None),
+ "block_end_string": template_args.get("block_end_string", None),
+ }
+ )
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+ return template_param
+
+ def import_jinja2_lstrip(self, templates):
+ # Option `lstrip_blocks' was added in Jinja2 version 2.7.
+ if any(tmp["lstrip_blocks"] for tmp in templates):
+ try:
+ import jinja2.defaults
+ except ImportError:
+ raise AnsibleError(
+ "Unable to import Jinja2 defaults for determining Jinja2 features."
+ )
+
+ try:
+ jinja2.defaults.LSTRIP_BLOCKS
+ except AttributeError:
+ raise AnsibleError(
+ "Option `lstrip_blocks' is only available in Jinja2 versions >=2.7"
+ )
+
+ def load_template(self, template, new_module_args, task_vars):
+ # template is only supported by k8s module.
+ if self._task.action not in (
+ "k8s",
+ "kubernetes.core.k8s",
+ "community.okd.k8s",
+ "redhat.openshift.k8s",
+ "community.kubernetes.k8s",
+ "openshift_adm_groups_sync",
+ "community.okd.openshift_adm_groups_sync",
+ "redhat.openshift.openshift_adm_groups_sync",
+ ):
+ raise AnsibleActionFail(
+ "'template' is only a supported parameter for the 'k8s' module."
+ )
+
+ omit_value = task_vars.get("omit")
+ template_params = []
+ if isinstance(template, string_types) or isinstance(template, dict):
+ template_params.append(self.get_template_args(template))
+ elif isinstance(template, list):
+ for element in template:
+ template_params.append(self.get_template_args(element))
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+
+ self.import_jinja2_lstrip(template_params)
+
+ wrong_sequences = ["\\n", "\\r", "\\r\\n"]
+ allowed_sequences = ["\n", "\r", "\r\n"]
+
+ result_template = []
+ old_vars = self._templar.available_variables
+
+ default_environment = {}
+ for key in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ "trim_blocks",
+ "lstrip_blocks",
+ ):
+ if hasattr(self._templar.environment, key):
+ default_environment[key] = getattr(self._templar.environment, key)
+ for template_item in template_params:
+ # We need to convert unescaped sequences to proper escaped sequences for Jinja2
+ newline_sequence = template_item["newline_sequence"]
+ if newline_sequence in wrong_sequences:
+ template_item["newline_sequence"] = allowed_sequences[
+ wrong_sequences.index(newline_sequence)
+ ]
+ elif newline_sequence not in allowed_sequences:
+ raise AnsibleActionFail(
+ "newline_sequence needs to be one of: \n, \r or \r\n"
+ )
+
+ # template the source data locally & get ready to transfer
+ with self.get_template_data(template_item["path"]) as template_data:
+ # add ansible 'template' vars
+ temp_vars = copy.deepcopy(task_vars)
+ for key, value in iteritems(template_item):
+ if hasattr(self._templar.environment, key):
+ if value is not None:
+ setattr(self._templar.environment, key, value)
+ else:
+ setattr(
+ self._templar.environment,
+ key,
+ default_environment.get(key),
+ )
+ self._templar.available_variables = temp_vars
+ result = self._templar.do_template(
+ template_data,
+ preserve_trailing_newlines=True,
+ escape_backslashes=False,
+ )
+ if omit_value is not None:
+ result_template.extend(RemoveOmit(result, omit_value).output())
+ else:
+ result_template.append(result)
+ self._templar.available_variables = old_vars
+ resource_definition = self._task.args.get("definition", None)
+ if not resource_definition:
+ new_module_args.pop("template")
+ new_module_args["definition"] = result_template
+
+ def get_file_realpath(self, local_path):
+ # local_path is only supported by k8s_cp module.
+ if self._task.action not in (
+ "k8s_cp",
+ "kubernetes.core.k8s_cp",
+ "community.kubernetes.k8s_cp",
+ ):
+ raise AnsibleActionFail(
+ "'local_path' is only supported parameter for 'k8s_cp' module."
+ )
+
+ if os.path.exists(local_path):
+ return local_path
+
+ try:
+ # find in expected paths
+ return self._find_needle("files", local_path)
+ except AnsibleError:
+ raise AnsibleActionFail(
+ "%s does not exist in local filesystem" % local_path
+ )
+
+ def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
+ if isinstance(kubeconfig, string_types):
+ # find the kubeconfig in the expected search path
+ if not remote_transport:
+ # kubeconfig is local
+ # find in expected paths
+ kubeconfig = self._find_needle("files", kubeconfig)
+
+ # decrypt kubeconfig found
+ actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
+ new_module_args["kubeconfig"] = actual_file
+
+ elif isinstance(kubeconfig, dict):
+ new_module_args["kubeconfig"] = kubeconfig
+ else:
+ raise AnsibleActionFail(
+ "Error while reading kubeconfig parameter - "
+ "a string or dict expected, but got %s instead" % type(kubeconfig)
+ )
+
+ def run(self, tmp=None, task_vars=None):
+ """handler for k8s options"""
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+ # Check current transport connection and depending upon
+ # look for kubeconfig and src
+ # 'local' => look files on Ansible Controller
+ # Transport other than 'local' => look files on remote node
+ remote_transport = self._connection.transport != "local"
+
+ new_module_args = copy.deepcopy(self._task.args)
+
+ kubeconfig = self._task.args.get("kubeconfig", None)
+ if kubeconfig:
+ try:
+ self.get_kubeconfig(kubeconfig, remote_transport, new_module_args)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ # find the file in the expected search path
+ src = self._task.args.get("src", None)
+
+ if src:
+ if remote_transport:
+ # src is on remote node
+ result.update(
+ self._execute_module(
+ module_name=self._task.action, task_vars=task_vars
+ )
+ )
+ return self._ensure_invocation(result)
+
+ # src is local
+ try:
+ # find in expected paths
+ src = self._find_needle("files", src)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ if src:
+ new_module_args["src"] = src
+
+ template = self._task.args.get("template", None)
+ if template:
+ self.load_template(template, new_module_args, task_vars)
+
+ local_path = self._task.args.get("local_path")
+ state = self._task.args.get("state", None)
+ if local_path and state == "to_pod" and not remote_transport:
+ new_module_args["local_path"] = self.get_file_realpath(local_path)
+
+ # Execute the k8s_* module.
+ module_return = self._execute_module(
+ module_name=self._task.action,
+ module_args=new_module_args,
+ task_vars=task_vars,
+ )
+
+ # Delete tmp path
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+
+ result.update(module_return)
+
+ return self._ensure_invocation(result)
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/helm_plugin.py b/collections/ansible_collections/kubernetes/core/plugins/action/helm_plugin.py
deleted file mode 120000
index 667980e..0000000
--- a/collections/ansible_collections/kubernetes/core/plugins/action/helm_plugin.py
+++ /dev/null
@@ -1 +0,0 @@
-k8s_info.py
\ No newline at end of file
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/helm_plugin.py b/collections/ansible_collections/kubernetes/core/plugins/action/helm_plugin.py
new file mode 100644
index 0000000..cfcbb28
--- /dev/null
+++ b/collections/ansible_collections/kubernetes/core/plugins/action/helm_plugin.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2012-2014, Michael DeHaan
+# Copyright (c) 2017, Toshio Kuratomi
+# Copyright (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+import traceback
+import os
+from contextlib import contextmanager
+
+from ansible.config.manager import ensure_type
+from ansible.errors import (
+ AnsibleError,
+ AnsibleFileNotFound,
+ AnsibleAction,
+ AnsibleActionFail,
+)
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.module_utils.six import string_types, iteritems
+from ansible.module_utils._text import to_text, to_bytes, to_native
+from ansible.plugins.action import ActionBase
+
+
+class RemoveOmit(object):
+ def __init__(self, buffer, omit_value):
+ try:
+ import yaml
+ except ImportError:
+ raise AnsibleError("Failed to import the required Python library (PyYAML).")
+ self.data = yaml.safe_load_all(buffer)
+ self.omit = omit_value
+
+ def remove_omit(self, data):
+ if isinstance(data, dict):
+ result = dict()
+ for key, value in iteritems(data):
+ if value == self.omit:
+ continue
+ result[key] = self.remove_omit(value)
+ return result
+ if isinstance(data, list):
+ return [self.remove_omit(v) for v in data if v != self.omit]
+ return data
+
+ def output(self):
+ return [self.remove_omit(d) for d in self.data]
+
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = True
+ DEFAULT_NEWLINE_SEQUENCE = "\n"
+
+ def _ensure_invocation(self, result):
+ # NOTE: adding invocation arguments here needs to be kept in sync with
+ # any no_log specified in the argument_spec in the module.
+ if "invocation" not in result:
+ if self._play_context.no_log:
+ result["invocation"] = "CENSORED: no_log is set"
+ else:
+ result["invocation"] = self._task.args.copy()
+ result["invocation"]["module_args"] = self._task.args.copy()
+
+ return result
+
+ @contextmanager
+ def get_template_data(self, template_path):
+ try:
+ source = self._find_needle("templates", template_path)
+ except AnsibleError as e:
+ raise AnsibleActionFail(to_text(e))
+
+ # Get vault decrypted tmp file
+ try:
+ tmp_source = self._loader.get_real_file(source)
+ except AnsibleFileNotFound as e:
+ raise AnsibleActionFail(
+ "could not find template=%s, %s" % (source, to_text(e))
+ )
+ b_tmp_source = to_bytes(tmp_source, errors="surrogate_or_strict")
+
+ try:
+ with open(b_tmp_source, "rb") as f:
+ try:
+ template_data = to_text(f.read(), errors="surrogate_or_strict")
+ except UnicodeError:
+ raise AnsibleActionFail(
+ "Template source files must be utf-8 encoded"
+ )
+ yield template_data
+ except AnsibleAction:
+ raise
+ except Exception as e:
+ raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
+ finally:
+ self._loader.cleanup_tmp_file(b_tmp_source)
+
+ def get_template_args(self, template):
+ template_param = {
+ "newline_sequence": self.DEFAULT_NEWLINE_SEQUENCE,
+ "variable_start_string": None,
+ "variable_end_string": None,
+ "block_start_string": None,
+ "block_end_string": None,
+ "trim_blocks": True,
+ "lstrip_blocks": False,
+ }
+ if isinstance(template, string_types):
+ # treat this as raw_params
+ template_param["path"] = template
+ elif isinstance(template, dict):
+ template_args = template
+ template_path = template_args.get("path", None)
+ if not template_path:
+ raise AnsibleActionFail("Please specify path for template.")
+ template_param["path"] = template_path
+
+ # Type validation for string options
+ for s_type in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ ):
+ if s_type in template_args:
+ value = ensure_type(template_args[s_type], "string")
+ if value is not None and not isinstance(value, string_types):
+ raise AnsibleActionFail(
+ "%s is expected to be a string, but got %s instead"
+ % (s_type, type(value))
+ )
+ try:
+ template_param.update(
+ {
+ "trim_blocks": boolean(
+ template_args.get("trim_blocks", True), strict=False
+ ),
+ "lstrip_blocks": boolean(
+ template_args.get("lstrip_blocks", False), strict=False
+ ),
+ }
+ )
+ except TypeError as e:
+ raise AnsibleActionFail(to_native(e))
+
+ template_param.update(
+ {
+ "newline_sequence": template_args.get(
+ "newline_sequence", self.DEFAULT_NEWLINE_SEQUENCE
+ ),
+ "variable_start_string": template_args.get(
+ "variable_start_string", None
+ ),
+ "variable_end_string": template_args.get(
+ "variable_end_string", None
+ ),
+ "block_start_string": template_args.get("block_start_string", None),
+ "block_end_string": template_args.get("block_end_string", None),
+ }
+ )
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+ return template_param
+
+ def import_jinja2_lstrip(self, templates):
+ # Option `lstrip_blocks' was added in Jinja2 version 2.7.
+ if any(tmp["lstrip_blocks"] for tmp in templates):
+ try:
+ import jinja2.defaults
+ except ImportError:
+ raise AnsibleError(
+ "Unable to import Jinja2 defaults for determining Jinja2 features."
+ )
+
+ try:
+ jinja2.defaults.LSTRIP_BLOCKS
+ except AttributeError:
+ raise AnsibleError(
+ "Option `lstrip_blocks' is only available in Jinja2 versions >=2.7"
+ )
+
+ def load_template(self, template, new_module_args, task_vars):
+ # template is only supported by k8s module.
+ if self._task.action not in (
+ "k8s",
+ "kubernetes.core.k8s",
+ "community.okd.k8s",
+ "redhat.openshift.k8s",
+ "community.kubernetes.k8s",
+ "openshift_adm_groups_sync",
+ "community.okd.openshift_adm_groups_sync",
+ "redhat.openshift.openshift_adm_groups_sync",
+ ):
+ raise AnsibleActionFail(
+ "'template' is only a supported parameter for the 'k8s' module."
+ )
+
+ omit_value = task_vars.get("omit")
+ template_params = []
+ if isinstance(template, string_types) or isinstance(template, dict):
+ template_params.append(self.get_template_args(template))
+ elif isinstance(template, list):
+ for element in template:
+ template_params.append(self.get_template_args(element))
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+
+ self.import_jinja2_lstrip(template_params)
+
+ wrong_sequences = ["\\n", "\\r", "\\r\\n"]
+ allowed_sequences = ["\n", "\r", "\r\n"]
+
+ result_template = []
+ old_vars = self._templar.available_variables
+
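+ # Snapshot the templar's current Jinja2 environment settings; each template
+ # item overrides them where it provides a value and falls back to these
+ # defaults otherwise.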
+ default_environment = {}
+ for key in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ "trim_blocks",
+ "lstrip_blocks",
+ ):
+ if hasattr(self._templar.environment, key):
+ default_environment[key] = getattr(self._templar.environment, key)
+ for template_item in template_params:
+ # We need to convert unescaped sequences to proper escaped sequences for Jinja2
+ newline_sequence = template_item["newline_sequence"]
+ if newline_sequence in wrong_sequences:
+ template_item["newline_sequence"] = allowed_sequences[
+ wrong_sequences.index(newline_sequence)
+ ]
+ elif newline_sequence not in allowed_sequences:
+ raise AnsibleActionFail(
+ "newline_sequence needs to be one of: \n, \r or \r\n"
+ )
+
+ # template the source data locally & get ready to transfer
+ with self.get_template_data(template_item["path"]) as template_data:
+ # add ansible 'template' vars
+ temp_vars = copy.deepcopy(task_vars)
+ for key, value in iteritems(template_item):
+ if hasattr(self._templar.environment, key):
+ if value is not None:
+ setattr(self._templar.environment, key, value)
+ else:
+ setattr(
+ self._templar.environment,
+ key,
+ default_environment.get(key),
+ )
+ self._templar.available_variables = temp_vars
+ result = self._templar.do_template(
+ template_data,
+ preserve_trailing_newlines=True,
+ escape_backslashes=False,
+ )
+ if omit_value is not None:
+ result_template.extend(RemoveOmit(result, omit_value).output())
+ else:
+ result_template.append(result)
+ self._templar.available_variables = old_vars
+ resource_definition = self._task.args.get("definition", None)
+ if not resource_definition:
+ new_module_args.pop("template")
+ new_module_args["definition"] = result_template
+
+ def get_file_realpath(self, local_path):
+ # local_path is only supported by k8s_cp module.
+ if self._task.action not in (
+ "k8s_cp",
+ "kubernetes.core.k8s_cp",
+ "community.kubernetes.k8s_cp",
+ ):
+ raise AnsibleActionFail(
+ "'local_path' is only supported parameter for 'k8s_cp' module."
+ )
+
+ if os.path.exists(local_path):
+ return local_path
+
+ try:
+ # find in expected paths
+ return self._find_needle("files", local_path)
+ except AnsibleError:
+ raise AnsibleActionFail(
+ "%s does not exist in local filesystem" % local_path
+ )
+
+ def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
+ if isinstance(kubeconfig, string_types):
+ # find the kubeconfig in the expected search path
+ if not remote_transport:
+ # kubeconfig is local
+ # find in expected paths
+ kubeconfig = self._find_needle("files", kubeconfig)
+
+ # decrypt kubeconfig found
+ actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
+ new_module_args["kubeconfig"] = actual_file
+
+ elif isinstance(kubeconfig, dict):
+ new_module_args["kubeconfig"] = kubeconfig
+ else:
+ raise AnsibleActionFail(
+ "Error while reading kubeconfig parameter - "
+ "a string or dict expected, but got %s instead" % type(kubeconfig)
+ )
+
+ def run(self, tmp=None, task_vars=None):
+ """handler for k8s options"""
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+ # Check the current transport connection and, depending on it, decide
+ # where to look for the kubeconfig and src files:
+ # 'local' => look for files on the Ansible controller
+ # any other transport => look for files on the remote node
+ remote_transport = self._connection.transport != "local"
+
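+ # Work on a copy of the task arguments so kubeconfig, src, template and
+ # local_path can be adjusted below without mutating the original task args.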
+ new_module_args = copy.deepcopy(self._task.args)
+
+ kubeconfig = self._task.args.get("kubeconfig", None)
+ if kubeconfig:
+ try:
+ self.get_kubeconfig(kubeconfig, remote_transport, new_module_args)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ # find the file in the expected search path
+ src = self._task.args.get("src", None)
+
+ if src:
+ if remote_transport:
+ # src is on remote node
+ result.update(
+ self._execute_module(
+ module_name=self._task.action, task_vars=task_vars
+ )
+ )
+ return self._ensure_invocation(result)
+
+ # src is local
+ try:
+ # find in expected paths
+ src = self._find_needle("files", src)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ if src:
+ new_module_args["src"] = src
+
+ template = self._task.args.get("template", None)
+ if template:
+ self.load_template(template, new_module_args, task_vars)
+
+ local_path = self._task.args.get("local_path")
+ state = self._task.args.get("state", None)
+ if local_path and state == "to_pod" and not remote_transport:
+ new_module_args["local_path"] = self.get_file_realpath(local_path)
+
+ # Execute the k8s_* module.
+ module_return = self._execute_module(
+ module_name=self._task.action,
+ module_args=new_module_args,
+ task_vars=task_vars,
+ )
+
+ # Delete tmp path
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+
+ result.update(module_return)
+
+ return self._ensure_invocation(result)
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/helm_plugin_info.py b/collections/ansible_collections/kubernetes/core/plugins/action/helm_plugin_info.py
deleted file mode 120000
index 667980e..0000000
--- a/collections/ansible_collections/kubernetes/core/plugins/action/helm_plugin_info.py
+++ /dev/null
@@ -1 +0,0 @@
-k8s_info.py
\ No newline at end of file
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/helm_plugin_info.py b/collections/ansible_collections/kubernetes/core/plugins/action/helm_plugin_info.py
new file mode 100644
index 0000000..cfcbb28
--- /dev/null
+++ b/collections/ansible_collections/kubernetes/core/plugins/action/helm_plugin_info.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2012-2014, Michael DeHaan
+# Copyright (c) 2017, Toshio Kuratomi
+# Copyright (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+import traceback
+import os
+from contextlib import contextmanager
+
+from ansible.config.manager import ensure_type
+from ansible.errors import (
+ AnsibleError,
+ AnsibleFileNotFound,
+ AnsibleAction,
+ AnsibleActionFail,
+)
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.module_utils.six import string_types, iteritems
+from ansible.module_utils._text import to_text, to_bytes, to_native
+from ansible.plugins.action import ActionBase
+
+
+class RemoveOmit(object):
+ def __init__(self, buffer, omit_value):
+ try:
+ import yaml
+ except ImportError:
+ raise AnsibleError("Failed to import the required Python library (PyYAML).")
+ self.data = yaml.safe_load_all(buffer)
+ self.omit = omit_value
+
+ def remove_omit(self, data):
+ if isinstance(data, dict):
+ result = dict()
+ for key, value in iteritems(data):
+ if value == self.omit:
+ continue
+ result[key] = self.remove_omit(value)
+ return result
+ if isinstance(data, list):
+ return [self.remove_omit(v) for v in data if v != self.omit]
+ return data
+
+ def output(self):
+ return [self.remove_omit(d) for d in self.data]
+
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = True
+ DEFAULT_NEWLINE_SEQUENCE = "\n"
+
+ def _ensure_invocation(self, result):
+ # NOTE: adding invocation arguments here needs to be kept in sync with
+ # any no_log specified in the argument_spec in the module.
+ if "invocation" not in result:
+ if self._play_context.no_log:
+ result["invocation"] = "CENSORED: no_log is set"
+ else:
+ result["invocation"] = self._task.args.copy()
+ result["invocation"]["module_args"] = self._task.args.copy()
+
+ return result
+
+ @contextmanager
+ def get_template_data(self, template_path):
+ try:
+ source = self._find_needle("templates", template_path)
+ except AnsibleError as e:
+ raise AnsibleActionFail(to_text(e))
+
+ # Get vault decrypted tmp file
+ try:
+ tmp_source = self._loader.get_real_file(source)
+ except AnsibleFileNotFound as e:
+ raise AnsibleActionFail(
+ "could not find template=%s, %s" % (source, to_text(e))
+ )
+ b_tmp_source = to_bytes(tmp_source, errors="surrogate_or_strict")
+
+ try:
+ with open(b_tmp_source, "rb") as f:
+ try:
+ template_data = to_text(f.read(), errors="surrogate_or_strict")
+ except UnicodeError:
+ raise AnsibleActionFail(
+ "Template source files must be utf-8 encoded"
+ )
+ yield template_data
+ except AnsibleAction:
+ raise
+ except Exception as e:
+ raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
+ finally:
+ self._loader.cleanup_tmp_file(b_tmp_source)
+
+ def get_template_args(self, template):
+ template_param = {
+ "newline_sequence": self.DEFAULT_NEWLINE_SEQUENCE,
+ "variable_start_string": None,
+ "variable_end_string": None,
+ "block_start_string": None,
+ "block_end_string": None,
+ "trim_blocks": True,
+ "lstrip_blocks": False,
+ }
+ if isinstance(template, string_types):
+ # treat this as raw_params
+ template_param["path"] = template
+ elif isinstance(template, dict):
+ template_args = template
+ template_path = template_args.get("path", None)
+ if not template_path:
+ raise AnsibleActionFail("Please specify path for template.")
+ template_param["path"] = template_path
+
+ # Type validation for string options
+ for s_type in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ ):
+ if s_type in template_args:
+ value = ensure_type(template_args[s_type], "string")
+ if value is not None and not isinstance(value, string_types):
+ raise AnsibleActionFail(
+ "%s is expected to be a string, but got %s instead"
+ % (s_type, type(value))
+ )
+ try:
+ template_param.update(
+ {
+ "trim_blocks": boolean(
+ template_args.get("trim_blocks", True), strict=False
+ ),
+ "lstrip_blocks": boolean(
+ template_args.get("lstrip_blocks", False), strict=False
+ ),
+ }
+ )
+ except TypeError as e:
+ raise AnsibleActionFail(to_native(e))
+
+ template_param.update(
+ {
+ "newline_sequence": template_args.get(
+ "newline_sequence", self.DEFAULT_NEWLINE_SEQUENCE
+ ),
+ "variable_start_string": template_args.get(
+ "variable_start_string", None
+ ),
+ "variable_end_string": template_args.get(
+ "variable_end_string", None
+ ),
+ "block_start_string": template_args.get("block_start_string", None),
+ "block_end_string": template_args.get("block_end_string", None),
+ }
+ )
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+ return template_param
+
+ def import_jinja2_lstrip(self, templates):
+ # Option `lstrip_blocks' was added in Jinja2 version 2.7.
+ if any(tmp["lstrip_blocks"] for tmp in templates):
+ try:
+ import jinja2.defaults
+ except ImportError:
+ raise AnsibleError(
+ "Unable to import Jinja2 defaults for determining Jinja2 features."
+ )
+
+ try:
+ jinja2.defaults.LSTRIP_BLOCKS
+ except AttributeError:
+ raise AnsibleError(
+ "Option `lstrip_blocks' is only available in Jinja2 versions >=2.7"
+ )
+
+ def load_template(self, template, new_module_args, task_vars):
+ # template is only supported by k8s module.
+ if self._task.action not in (
+ "k8s",
+ "kubernetes.core.k8s",
+ "community.okd.k8s",
+ "redhat.openshift.k8s",
+ "community.kubernetes.k8s",
+ "openshift_adm_groups_sync",
+ "community.okd.openshift_adm_groups_sync",
+ "redhat.openshift.openshift_adm_groups_sync",
+ ):
+ raise AnsibleActionFail(
+ "'template' is only a supported parameter for the 'k8s' module."
+ )
+
+ omit_value = task_vars.get("omit")
+ template_params = []
+ if isinstance(template, string_types) or isinstance(template, dict):
+ template_params.append(self.get_template_args(template))
+ elif isinstance(template, list):
+ for element in template:
+ template_params.append(self.get_template_args(element))
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+
+ self.import_jinja2_lstrip(template_params)
+
+ wrong_sequences = ["\\n", "\\r", "\\r\\n"]
+ allowed_sequences = ["\n", "\r", "\r\n"]
+
+ result_template = []
+ old_vars = self._templar.available_variables
+
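+ # Snapshot the templar's current Jinja2 environment settings; each template
+ # item overrides them where it provides a value and falls back to these
+ # defaults otherwise.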
+ default_environment = {}
+ for key in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ "trim_blocks",
+ "lstrip_blocks",
+ ):
+ if hasattr(self._templar.environment, key):
+ default_environment[key] = getattr(self._templar.environment, key)
+ for template_item in template_params:
+ # We need to convert unescaped sequences to proper escaped sequences for Jinja2
+ newline_sequence = template_item["newline_sequence"]
+ if newline_sequence in wrong_sequences:
+ template_item["newline_sequence"] = allowed_sequences[
+ wrong_sequences.index(newline_sequence)
+ ]
+ elif newline_sequence not in allowed_sequences:
+ raise AnsibleActionFail(
+ "newline_sequence needs to be one of: \n, \r or \r\n"
+ )
+
+ # template the source data locally & get ready to transfer
+ with self.get_template_data(template_item["path"]) as template_data:
+ # add ansible 'template' vars
+ temp_vars = copy.deepcopy(task_vars)
+ for key, value in iteritems(template_item):
+ if hasattr(self._templar.environment, key):
+ if value is not None:
+ setattr(self._templar.environment, key, value)
+ else:
+ setattr(
+ self._templar.environment,
+ key,
+ default_environment.get(key),
+ )
+ self._templar.available_variables = temp_vars
+ result = self._templar.do_template(
+ template_data,
+ preserve_trailing_newlines=True,
+ escape_backslashes=False,
+ )
+ if omit_value is not None:
+ result_template.extend(RemoveOmit(result, omit_value).output())
+ else:
+ result_template.append(result)
+ self._templar.available_variables = old_vars
+ resource_definition = self._task.args.get("definition", None)
+ if not resource_definition:
+ new_module_args.pop("template")
+ new_module_args["definition"] = result_template
+
+ def get_file_realpath(self, local_path):
+ # local_path is only supported by k8s_cp module.
+ if self._task.action not in (
+ "k8s_cp",
+ "kubernetes.core.k8s_cp",
+ "community.kubernetes.k8s_cp",
+ ):
+ raise AnsibleActionFail(
+ "'local_path' is only supported parameter for 'k8s_cp' module."
+ )
+
+ if os.path.exists(local_path):
+ return local_path
+
+ try:
+ # find in expected paths
+ return self._find_needle("files", local_path)
+ except AnsibleError:
+ raise AnsibleActionFail(
+ "%s does not exist in local filesystem" % local_path
+ )
+
+ def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
+ if isinstance(kubeconfig, string_types):
+ # find the kubeconfig in the expected search path
+ if not remote_transport:
+ # kubeconfig is local
+ # find in expected paths
+ kubeconfig = self._find_needle("files", kubeconfig)
+
+ # decrypt kubeconfig found
+ actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
+ new_module_args["kubeconfig"] = actual_file
+
+ elif isinstance(kubeconfig, dict):
+ new_module_args["kubeconfig"] = kubeconfig
+ else:
+ raise AnsibleActionFail(
+ "Error while reading kubeconfig parameter - "
+ "a string or dict expected, but got %s instead" % type(kubeconfig)
+ )
+
+ def run(self, tmp=None, task_vars=None):
+ """handler for k8s options"""
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+ # Check the current transport connection and, depending on it, decide
+ # where to look for the kubeconfig and src files:
+ # 'local' => look for files on the Ansible controller
+ # any other transport => look for files on the remote node
+ remote_transport = self._connection.transport != "local"
+
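+ # Work on a copy of the task arguments so kubeconfig, src, template and
+ # local_path can be adjusted below without mutating the original task args.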
+ new_module_args = copy.deepcopy(self._task.args)
+
+ kubeconfig = self._task.args.get("kubeconfig", None)
+ if kubeconfig:
+ try:
+ self.get_kubeconfig(kubeconfig, remote_transport, new_module_args)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ # find the file in the expected search path
+ src = self._task.args.get("src", None)
+
+ if src:
+ if remote_transport:
+ # src is on remote node
+ result.update(
+ self._execute_module(
+ module_name=self._task.action, task_vars=task_vars
+ )
+ )
+ return self._ensure_invocation(result)
+
+ # src is local
+ try:
+ # find in expected paths
+ src = self._find_needle("files", src)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ if src:
+ new_module_args["src"] = src
+
+ template = self._task.args.get("template", None)
+ if template:
+ self.load_template(template, new_module_args, task_vars)
+
+ local_path = self._task.args.get("local_path")
+ state = self._task.args.get("state", None)
+ if local_path and state == "to_pod" and not remote_transport:
+ new_module_args["local_path"] = self.get_file_realpath(local_path)
+
+ # Execute the k8s_* module.
+ module_return = self._execute_module(
+ module_name=self._task.action,
+ module_args=new_module_args,
+ task_vars=task_vars,
+ )
+
+ # Delete tmp path
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+
+ result.update(module_return)
+
+ return self._ensure_invocation(result)
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/helm_repository.py b/collections/ansible_collections/kubernetes/core/plugins/action/helm_repository.py
deleted file mode 120000
index 667980e..0000000
--- a/collections/ansible_collections/kubernetes/core/plugins/action/helm_repository.py
+++ /dev/null
@@ -1 +0,0 @@
-k8s_info.py
\ No newline at end of file
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/helm_repository.py b/collections/ansible_collections/kubernetes/core/plugins/action/helm_repository.py
new file mode 100644
index 0000000..cfcbb28
--- /dev/null
+++ b/collections/ansible_collections/kubernetes/core/plugins/action/helm_repository.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2012-2014, Michael DeHaan
+# Copyright (c) 2017, Toshio Kuratomi
+# Copyright (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+import traceback
+import os
+from contextlib import contextmanager
+
+from ansible.config.manager import ensure_type
+from ansible.errors import (
+ AnsibleError,
+ AnsibleFileNotFound,
+ AnsibleAction,
+ AnsibleActionFail,
+)
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.module_utils.six import string_types, iteritems
+from ansible.module_utils._text import to_text, to_bytes, to_native
+from ansible.plugins.action import ActionBase
+
+
+class RemoveOmit(object):
+ def __init__(self, buffer, omit_value):
+ try:
+ import yaml
+ except ImportError:
+ raise AnsibleError("Failed to import the required Python library (PyYAML).")
+ self.data = yaml.safe_load_all(buffer)
+ self.omit = omit_value
+
+ def remove_omit(self, data):
+ if isinstance(data, dict):
+ result = dict()
+ for key, value in iteritems(data):
+ if value == self.omit:
+ continue
+ result[key] = self.remove_omit(value)
+ return result
+ if isinstance(data, list):
+ return [self.remove_omit(v) for v in data if v != self.omit]
+ return data
+
+ def output(self):
+ return [self.remove_omit(d) for d in self.data]
+
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = True
+ DEFAULT_NEWLINE_SEQUENCE = "\n"
+
+ def _ensure_invocation(self, result):
+ # NOTE: adding invocation arguments here needs to be kept in sync with
+ # any no_log specified in the argument_spec in the module.
+ if "invocation" not in result:
+ if self._play_context.no_log:
+ result["invocation"] = "CENSORED: no_log is set"
+ else:
+ result["invocation"] = self._task.args.copy()
+ result["invocation"]["module_args"] = self._task.args.copy()
+
+ return result
+
+ @contextmanager
+ def get_template_data(self, template_path):
+ try:
+ source = self._find_needle("templates", template_path)
+ except AnsibleError as e:
+ raise AnsibleActionFail(to_text(e))
+
+ # Get vault decrypted tmp file
+ try:
+ tmp_source = self._loader.get_real_file(source)
+ except AnsibleFileNotFound as e:
+ raise AnsibleActionFail(
+ "could not find template=%s, %s" % (source, to_text(e))
+ )
+ b_tmp_source = to_bytes(tmp_source, errors="surrogate_or_strict")
+
+ try:
+ with open(b_tmp_source, "rb") as f:
+ try:
+ template_data = to_text(f.read(), errors="surrogate_or_strict")
+ except UnicodeError:
+ raise AnsibleActionFail(
+ "Template source files must be utf-8 encoded"
+ )
+ yield template_data
+ except AnsibleAction:
+ raise
+ except Exception as e:
+ raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
+ finally:
+ self._loader.cleanup_tmp_file(b_tmp_source)
+
+ def get_template_args(self, template):
+ template_param = {
+ "newline_sequence": self.DEFAULT_NEWLINE_SEQUENCE,
+ "variable_start_string": None,
+ "variable_end_string": None,
+ "block_start_string": None,
+ "block_end_string": None,
+ "trim_blocks": True,
+ "lstrip_blocks": False,
+ }
+ if isinstance(template, string_types):
+ # treat this as raw_params
+ template_param["path"] = template
+ elif isinstance(template, dict):
+ template_args = template
+ template_path = template_args.get("path", None)
+ if not template_path:
+ raise AnsibleActionFail("Please specify path for template.")
+ template_param["path"] = template_path
+
+ # Type validation for string options
+ for s_type in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ ):
+ if s_type in template_args:
+ value = ensure_type(template_args[s_type], "string")
+ if value is not None and not isinstance(value, string_types):
+ raise AnsibleActionFail(
+ "%s is expected to be a string, but got %s instead"
+ % (s_type, type(value))
+ )
+ try:
+ template_param.update(
+ {
+ "trim_blocks": boolean(
+ template_args.get("trim_blocks", True), strict=False
+ ),
+ "lstrip_blocks": boolean(
+ template_args.get("lstrip_blocks", False), strict=False
+ ),
+ }
+ )
+ except TypeError as e:
+ raise AnsibleActionFail(to_native(e))
+
+ template_param.update(
+ {
+ "newline_sequence": template_args.get(
+ "newline_sequence", self.DEFAULT_NEWLINE_SEQUENCE
+ ),
+ "variable_start_string": template_args.get(
+ "variable_start_string", None
+ ),
+ "variable_end_string": template_args.get(
+ "variable_end_string", None
+ ),
+ "block_start_string": template_args.get("block_start_string", None),
+ "block_end_string": template_args.get("block_end_string", None),
+ }
+ )
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+ return template_param
+
+ def import_jinja2_lstrip(self, templates):
+ # Option `lstrip_blocks' was added in Jinja2 version 2.7.
+ if any(tmp["lstrip_blocks"] for tmp in templates):
+ try:
+ import jinja2.defaults
+ except ImportError:
+ raise AnsibleError(
+ "Unable to import Jinja2 defaults for determining Jinja2 features."
+ )
+
+ try:
+ jinja2.defaults.LSTRIP_BLOCKS
+ except AttributeError:
+ raise AnsibleError(
+ "Option `lstrip_blocks' is only available in Jinja2 versions >=2.7"
+ )
+
+ def load_template(self, template, new_module_args, task_vars):
+ # template is only supported by k8s module.
+ if self._task.action not in (
+ "k8s",
+ "kubernetes.core.k8s",
+ "community.okd.k8s",
+ "redhat.openshift.k8s",
+ "community.kubernetes.k8s",
+ "openshift_adm_groups_sync",
+ "community.okd.openshift_adm_groups_sync",
+ "redhat.openshift.openshift_adm_groups_sync",
+ ):
+ raise AnsibleActionFail(
+ "'template' is only a supported parameter for the 'k8s' module."
+ )
+
+ omit_value = task_vars.get("omit")
+ template_params = []
+ if isinstance(template, string_types) or isinstance(template, dict):
+ template_params.append(self.get_template_args(template))
+ elif isinstance(template, list):
+ for element in template:
+ template_params.append(self.get_template_args(element))
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+
+ self.import_jinja2_lstrip(template_params)
+
+ wrong_sequences = ["\\n", "\\r", "\\r\\n"]
+ allowed_sequences = ["\n", "\r", "\r\n"]
+
+ result_template = []
+ old_vars = self._templar.available_variables
+
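+ # Snapshot the templar's current Jinja2 environment settings; each template
+ # item overrides them where it provides a value and falls back to these
+ # defaults otherwise.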
+ default_environment = {}
+ for key in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ "trim_blocks",
+ "lstrip_blocks",
+ ):
+ if hasattr(self._templar.environment, key):
+ default_environment[key] = getattr(self._templar.environment, key)
+ for template_item in template_params:
+ # We need to convert unescaped sequences to proper escaped sequences for Jinja2
+ newline_sequence = template_item["newline_sequence"]
+ if newline_sequence in wrong_sequences:
+ template_item["newline_sequence"] = allowed_sequences[
+ wrong_sequences.index(newline_sequence)
+ ]
+ elif newline_sequence not in allowed_sequences:
+ raise AnsibleActionFail(
+ "newline_sequence needs to be one of: \n, \r or \r\n"
+ )
+
+ # template the source data locally & get ready to transfer
+ with self.get_template_data(template_item["path"]) as template_data:
+ # add ansible 'template' vars
+ temp_vars = copy.deepcopy(task_vars)
+ for key, value in iteritems(template_item):
+ if hasattr(self._templar.environment, key):
+ if value is not None:
+ setattr(self._templar.environment, key, value)
+ else:
+ setattr(
+ self._templar.environment,
+ key,
+ default_environment.get(key),
+ )
+ self._templar.available_variables = temp_vars
+ result = self._templar.do_template(
+ template_data,
+ preserve_trailing_newlines=True,
+ escape_backslashes=False,
+ )
+ if omit_value is not None:
+ result_template.extend(RemoveOmit(result, omit_value).output())
+ else:
+ result_template.append(result)
+ self._templar.available_variables = old_vars
+ resource_definition = self._task.args.get("definition", None)
+ if not resource_definition:
+ new_module_args.pop("template")
+ new_module_args["definition"] = result_template
+
+ def get_file_realpath(self, local_path):
+ # local_path is only supported by k8s_cp module.
+ if self._task.action not in (
+ "k8s_cp",
+ "kubernetes.core.k8s_cp",
+ "community.kubernetes.k8s_cp",
+ ):
+ raise AnsibleActionFail(
+ "'local_path' is only supported parameter for 'k8s_cp' module."
+ )
+
+ if os.path.exists(local_path):
+ return local_path
+
+ try:
+ # find in expected paths
+ return self._find_needle("files", local_path)
+ except AnsibleError:
+ raise AnsibleActionFail(
+ "%s does not exist in local filesystem" % local_path
+ )
+
+ def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
+ if isinstance(kubeconfig, string_types):
+ # find the kubeconfig in the expected search path
+ if not remote_transport:
+ # kubeconfig is local
+ # find in expected paths
+ kubeconfig = self._find_needle("files", kubeconfig)
+
+ # decrypt kubeconfig found
+ actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
+ new_module_args["kubeconfig"] = actual_file
+
+ elif isinstance(kubeconfig, dict):
+ new_module_args["kubeconfig"] = kubeconfig
+ else:
+ raise AnsibleActionFail(
+ "Error while reading kubeconfig parameter - "
+ "a string or dict expected, but got %s instead" % type(kubeconfig)
+ )
+
+ def run(self, tmp=None, task_vars=None):
+ """handler for k8s options"""
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+ # Check the current transport connection and, depending on it, decide
+ # where to look for the kubeconfig and src files:
+ # 'local' => look for files on the Ansible controller
+ # any other transport => look for files on the remote node
+ remote_transport = self._connection.transport != "local"
+
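+ # Work on a copy of the task arguments so kubeconfig, src, template and
+ # local_path can be adjusted below without mutating the original task args.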
+ new_module_args = copy.deepcopy(self._task.args)
+
+ kubeconfig = self._task.args.get("kubeconfig", None)
+ if kubeconfig:
+ try:
+ self.get_kubeconfig(kubeconfig, remote_transport, new_module_args)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ # find the file in the expected search path
+ src = self._task.args.get("src", None)
+
+ if src:
+ if remote_transport:
+ # src is on remote node
+ result.update(
+ self._execute_module(
+ module_name=self._task.action, task_vars=task_vars
+ )
+ )
+ return self._ensure_invocation(result)
+
+ # src is local
+ try:
+ # find in expected paths
+ src = self._find_needle("files", src)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ if src:
+ new_module_args["src"] = src
+
+ template = self._task.args.get("template", None)
+ if template:
+ self.load_template(template, new_module_args, task_vars)
+
+ local_path = self._task.args.get("local_path")
+ state = self._task.args.get("state", None)
+ if local_path and state == "to_pod" and not remote_transport:
+ new_module_args["local_path"] = self.get_file_realpath(local_path)
+
+ # Execute the k8s_* module.
+ module_return = self._execute_module(
+ module_name=self._task.action,
+ module_args=new_module_args,
+ task_vars=task_vars,
+ )
+
+ # Delete tmp path
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+
+ result.update(module_return)
+
+ return self._ensure_invocation(result)
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s.py
deleted file mode 120000
index 667980e..0000000
--- a/collections/ansible_collections/kubernetes/core/plugins/action/k8s.py
+++ /dev/null
@@ -1 +0,0 @@
-k8s_info.py
\ No newline at end of file
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s.py
new file mode 100644
index 0000000..cfcbb28
--- /dev/null
+++ b/collections/ansible_collections/kubernetes/core/plugins/action/k8s.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2012-2014, Michael DeHaan
+# Copyright (c) 2017, Toshio Kuratomi
+# Copyright (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+import traceback
+import os
+from contextlib import contextmanager
+
+from ansible.config.manager import ensure_type
+from ansible.errors import (
+ AnsibleError,
+ AnsibleFileNotFound,
+ AnsibleAction,
+ AnsibleActionFail,
+)
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.module_utils.six import string_types, iteritems
+from ansible.module_utils._text import to_text, to_bytes, to_native
+from ansible.plugins.action import ActionBase
+
+
+class RemoveOmit(object):
+ def __init__(self, buffer, omit_value):
+ try:
+ import yaml
+ except ImportError:
+ raise AnsibleError("Failed to import the required Python library (PyYAML).")
+ self.data = yaml.safe_load_all(buffer)
+ self.omit = omit_value
+
+ def remove_omit(self, data):
+ if isinstance(data, dict):
+ result = dict()
+ for key, value in iteritems(data):
+ if value == self.omit:
+ continue
+ result[key] = self.remove_omit(value)
+ return result
+ if isinstance(data, list):
+ return [self.remove_omit(v) for v in data if v != self.omit]
+ return data
+
+ def output(self):
+ return [self.remove_omit(d) for d in self.data]
+
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = True
+ DEFAULT_NEWLINE_SEQUENCE = "\n"
+
+ def _ensure_invocation(self, result):
+ # NOTE: adding invocation arguments here needs to be kept in sync with
+ # any no_log specified in the argument_spec in the module.
+ if "invocation" not in result:
+ if self._play_context.no_log:
+ result["invocation"] = "CENSORED: no_log is set"
+ else:
+ result["invocation"] = self._task.args.copy()
+ result["invocation"]["module_args"] = self._task.args.copy()
+
+ return result
+
+ @contextmanager
+ def get_template_data(self, template_path):
+ try:
+ source = self._find_needle("templates", template_path)
+ except AnsibleError as e:
+ raise AnsibleActionFail(to_text(e))
+
+ # Get vault decrypted tmp file
+ try:
+ tmp_source = self._loader.get_real_file(source)
+ except AnsibleFileNotFound as e:
+ raise AnsibleActionFail(
+ "could not find template=%s, %s" % (source, to_text(e))
+ )
+ b_tmp_source = to_bytes(tmp_source, errors="surrogate_or_strict")
+
+ try:
+ with open(b_tmp_source, "rb") as f:
+ try:
+ template_data = to_text(f.read(), errors="surrogate_or_strict")
+ except UnicodeError:
+ raise AnsibleActionFail(
+ "Template source files must be utf-8 encoded"
+ )
+ yield template_data
+ except AnsibleAction:
+ raise
+ except Exception as e:
+ raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
+ finally:
+ self._loader.cleanup_tmp_file(b_tmp_source)
+
+ def get_template_args(self, template):
+ template_param = {
+ "newline_sequence": self.DEFAULT_NEWLINE_SEQUENCE,
+ "variable_start_string": None,
+ "variable_end_string": None,
+ "block_start_string": None,
+ "block_end_string": None,
+ "trim_blocks": True,
+ "lstrip_blocks": False,
+ }
+ if isinstance(template, string_types):
+ # treat this as raw_params
+ template_param["path"] = template
+ elif isinstance(template, dict):
+ template_args = template
+ template_path = template_args.get("path", None)
+ if not template_path:
+ raise AnsibleActionFail("Please specify path for template.")
+ template_param["path"] = template_path
+
+ # Type validation for string options
+ for s_type in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ ):
+ if s_type in template_args:
+ value = ensure_type(template_args[s_type], "string")
+ if value is not None and not isinstance(value, string_types):
+ raise AnsibleActionFail(
+ "%s is expected to be a string, but got %s instead"
+ % (s_type, type(value))
+ )
+ try:
+ template_param.update(
+ {
+ "trim_blocks": boolean(
+ template_args.get("trim_blocks", True), strict=False
+ ),
+ "lstrip_blocks": boolean(
+ template_args.get("lstrip_blocks", False), strict=False
+ ),
+ }
+ )
+ except TypeError as e:
+ raise AnsibleActionFail(to_native(e))
+
+ template_param.update(
+ {
+ "newline_sequence": template_args.get(
+ "newline_sequence", self.DEFAULT_NEWLINE_SEQUENCE
+ ),
+ "variable_start_string": template_args.get(
+ "variable_start_string", None
+ ),
+ "variable_end_string": template_args.get(
+ "variable_end_string", None
+ ),
+ "block_start_string": template_args.get("block_start_string", None),
+ "block_end_string": template_args.get("block_end_string", None),
+ }
+ )
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+ return template_param
+
+ def import_jinja2_lstrip(self, templates):
+ # Option `lstrip_blocks' was added in Jinja2 version 2.7.
+ if any(tmp["lstrip_blocks"] for tmp in templates):
+ try:
+ import jinja2.defaults
+ except ImportError:
+ raise AnsibleError(
+ "Unable to import Jinja2 defaults for determining Jinja2 features."
+ )
+
+ try:
+ jinja2.defaults.LSTRIP_BLOCKS
+ except AttributeError:
+ raise AnsibleError(
+ "Option `lstrip_blocks' is only available in Jinja2 versions >=2.7"
+ )
+
+ def load_template(self, template, new_module_args, task_vars):
+ # template is only supported by k8s module.
+ if self._task.action not in (
+ "k8s",
+ "kubernetes.core.k8s",
+ "community.okd.k8s",
+ "redhat.openshift.k8s",
+ "community.kubernetes.k8s",
+ "openshift_adm_groups_sync",
+ "community.okd.openshift_adm_groups_sync",
+ "redhat.openshift.openshift_adm_groups_sync",
+ ):
+ raise AnsibleActionFail(
+ "'template' is only a supported parameter for the 'k8s' module."
+ )
+
+ omit_value = task_vars.get("omit")
+ template_params = []
+ if isinstance(template, string_types) or isinstance(template, dict):
+ template_params.append(self.get_template_args(template))
+ elif isinstance(template, list):
+ for element in template:
+ template_params.append(self.get_template_args(element))
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+
+ self.import_jinja2_lstrip(template_params)
+
+ wrong_sequences = ["\\n", "\\r", "\\r\\n"]
+ allowed_sequences = ["\n", "\r", "\r\n"]
+
+ result_template = []
+ old_vars = self._templar.available_variables
+
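+ # Snapshot the templar's current Jinja2 environment settings; each template
+ # item overrides them where it provides a value and falls back to these
+ # defaults otherwise.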
+ default_environment = {}
+ for key in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ "trim_blocks",
+ "lstrip_blocks",
+ ):
+ if hasattr(self._templar.environment, key):
+ default_environment[key] = getattr(self._templar.environment, key)
+ for template_item in template_params:
+ # We need to convert unescaped sequences to proper escaped sequences for Jinja2
+ newline_sequence = template_item["newline_sequence"]
+ if newline_sequence in wrong_sequences:
+ template_item["newline_sequence"] = allowed_sequences[
+ wrong_sequences.index(newline_sequence)
+ ]
+ elif newline_sequence not in allowed_sequences:
+ raise AnsibleActionFail(
+ "newline_sequence needs to be one of: \n, \r or \r\n"
+ )
+
+ # template the source data locally & get ready to transfer
+ with self.get_template_data(template_item["path"]) as template_data:
+ # add ansible 'template' vars
+ temp_vars = copy.deepcopy(task_vars)
+ for key, value in iteritems(template_item):
+ if hasattr(self._templar.environment, key):
+ if value is not None:
+ setattr(self._templar.environment, key, value)
+ else:
+ setattr(
+ self._templar.environment,
+ key,
+ default_environment.get(key),
+ )
+ self._templar.available_variables = temp_vars
+ result = self._templar.do_template(
+ template_data,
+ preserve_trailing_newlines=True,
+ escape_backslashes=False,
+ )
+ if omit_value is not None:
+ result_template.extend(RemoveOmit(result, omit_value).output())
+ else:
+ result_template.append(result)
+ self._templar.available_variables = old_vars
+ resource_definition = self._task.args.get("definition", None)
+ if not resource_definition:
+ new_module_args.pop("template")
+ new_module_args["definition"] = result_template
+
+ def get_file_realpath(self, local_path):
+ # local_path is only supported by k8s_cp module.
+ if self._task.action not in (
+ "k8s_cp",
+ "kubernetes.core.k8s_cp",
+ "community.kubernetes.k8s_cp",
+ ):
+ raise AnsibleActionFail(
+ "'local_path' is only supported parameter for 'k8s_cp' module."
+ )
+
+ if os.path.exists(local_path):
+ return local_path
+
+ try:
+ # find in expected paths
+ return self._find_needle("files", local_path)
+ except AnsibleError:
+ raise AnsibleActionFail(
+ "%s does not exist in local filesystem" % local_path
+ )
+
+ def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
+ if isinstance(kubeconfig, string_types):
+ # find the kubeconfig in the expected search path
+ if not remote_transport:
+ # kubeconfig is local
+ # find in expected paths
+ kubeconfig = self._find_needle("files", kubeconfig)
+
+ # decrypt kubeconfig found
+ actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
+ new_module_args["kubeconfig"] = actual_file
+
+ elif isinstance(kubeconfig, dict):
+ new_module_args["kubeconfig"] = kubeconfig
+ else:
+ raise AnsibleActionFail(
+ "Error while reading kubeconfig parameter - "
+ "a string or dict expected, but got %s instead" % type(kubeconfig)
+ )
+
+ def run(self, tmp=None, task_vars=None):
+ """handler for k8s options"""
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+ # Check the current transport connection and, depending on it, decide
+ # where to look for the kubeconfig and src files:
+ # 'local' => look for files on the Ansible controller
+ # any other transport => look for files on the remote node
+ remote_transport = self._connection.transport != "local"
+
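+ # Work on a copy of the task arguments so kubeconfig, src, template and
+ # local_path can be adjusted below without mutating the original task args.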
+ new_module_args = copy.deepcopy(self._task.args)
+
+ kubeconfig = self._task.args.get("kubeconfig", None)
+ if kubeconfig:
+ try:
+ self.get_kubeconfig(kubeconfig, remote_transport, new_module_args)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ # find the file in the expected search path
+ src = self._task.args.get("src", None)
+
+ if src:
+ if remote_transport:
+ # src is on remote node
+ result.update(
+ self._execute_module(
+ module_name=self._task.action, task_vars=task_vars
+ )
+ )
+ return self._ensure_invocation(result)
+
+ # src is local
+ try:
+ # find in expected paths
+ src = self._find_needle("files", src)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ if src:
+ new_module_args["src"] = src
+
+ template = self._task.args.get("template", None)
+ if template:
+ self.load_template(template, new_module_args, task_vars)
+
+ local_path = self._task.args.get("local_path")
+ state = self._task.args.get("state", None)
+ if local_path and state == "to_pod" and not remote_transport:
+ new_module_args["local_path"] = self.get_file_realpath(local_path)
+
+ # Execute the k8s_* module.
+ module_return = self._execute_module(
+ module_name=self._task.action,
+ module_args=new_module_args,
+ task_vars=task_vars,
+ )
+
+ # Delete tmp path
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+
+ result.update(module_return)
+
+ return self._ensure_invocation(result)
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_cluster_info.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_cluster_info.py
deleted file mode 120000
index 667980e..0000000
--- a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_cluster_info.py
+++ /dev/null
@@ -1 +0,0 @@
-k8s_info.py
\ No newline at end of file
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_cluster_info.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_cluster_info.py
new file mode 100644
index 0000000..cfcbb28
--- /dev/null
+++ b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_cluster_info.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2012-2014, Michael DeHaan
+# Copyright (c) 2017, Toshio Kuratomi
+# Copyright (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+import traceback
+import os
+from contextlib import contextmanager
+
+from ansible.config.manager import ensure_type
+from ansible.errors import (
+ AnsibleError,
+ AnsibleFileNotFound,
+ AnsibleAction,
+ AnsibleActionFail,
+)
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.module_utils.six import string_types, iteritems
+from ansible.module_utils._text import to_text, to_bytes, to_native
+from ansible.plugins.action import ActionBase
+
+
+class RemoveOmit(object):
+ def __init__(self, buffer, omit_value):
+ try:
+ import yaml
+ except ImportError:
+ raise AnsibleError("Failed to import the required Python library (PyYAML).")
+ self.data = yaml.safe_load_all(buffer)
+ self.omit = omit_value
+
+ def remove_omit(self, data):
+ if isinstance(data, dict):
+ result = dict()
+ for key, value in iteritems(data):
+ if value == self.omit:
+ continue
+ result[key] = self.remove_omit(value)
+ return result
+ if isinstance(data, list):
+ return [self.remove_omit(v) for v in data if v != self.omit]
+ return data
+
+ def output(self):
+ return [self.remove_omit(d) for d in self.data]
+
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = True
+ DEFAULT_NEWLINE_SEQUENCE = "\n"
+
+ def _ensure_invocation(self, result):
+ # NOTE: adding invocation arguments here needs to be kept in sync with
+ # any no_log specified in the argument_spec in the module.
+ if "invocation" not in result:
+ if self._play_context.no_log:
+ result["invocation"] = "CENSORED: no_log is set"
+ else:
+ result["invocation"] = self._task.args.copy()
+ result["invocation"]["module_args"] = self._task.args.copy()
+
+ return result
+
+ @contextmanager
+ def get_template_data(self, template_path):
+ try:
+ source = self._find_needle("templates", template_path)
+ except AnsibleError as e:
+ raise AnsibleActionFail(to_text(e))
+
+ # Get vault decrypted tmp file
+ try:
+ tmp_source = self._loader.get_real_file(source)
+ except AnsibleFileNotFound as e:
+ raise AnsibleActionFail(
+ "could not find template=%s, %s" % (source, to_text(e))
+ )
+ b_tmp_source = to_bytes(tmp_source, errors="surrogate_or_strict")
+
+ try:
+ with open(b_tmp_source, "rb") as f:
+ try:
+ template_data = to_text(f.read(), errors="surrogate_or_strict")
+ except UnicodeError:
+ raise AnsibleActionFail(
+ "Template source files must be utf-8 encoded"
+ )
+ yield template_data
+ except AnsibleAction:
+ raise
+ except Exception as e:
+ raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
+ finally:
+ self._loader.cleanup_tmp_file(b_tmp_source)
+
+ def get_template_args(self, template):
+ template_param = {
+ "newline_sequence": self.DEFAULT_NEWLINE_SEQUENCE,
+ "variable_start_string": None,
+ "variable_end_string": None,
+ "block_start_string": None,
+ "block_end_string": None,
+ "trim_blocks": True,
+ "lstrip_blocks": False,
+ }
+ if isinstance(template, string_types):
+ # treat this as raw_params
+ template_param["path"] = template
+ elif isinstance(template, dict):
+ template_args = template
+ template_path = template_args.get("path", None)
+ if not template_path:
+ raise AnsibleActionFail("Please specify path for template.")
+ template_param["path"] = template_path
+
+ # Type validation for string options
+ for s_type in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ ):
+ if s_type in template_args:
+ value = ensure_type(template_args[s_type], "string")
+ if value is not None and not isinstance(value, string_types):
+ raise AnsibleActionFail(
+ "%s is expected to be a string, but got %s instead"
+ % (s_type, type(value))
+ )
+ try:
+ template_param.update(
+ {
+ "trim_blocks": boolean(
+ template_args.get("trim_blocks", True), strict=False
+ ),
+ "lstrip_blocks": boolean(
+ template_args.get("lstrip_blocks", False), strict=False
+ ),
+ }
+ )
+ except TypeError as e:
+ raise AnsibleActionFail(to_native(e))
+
+ template_param.update(
+ {
+ "newline_sequence": template_args.get(
+ "newline_sequence", self.DEFAULT_NEWLINE_SEQUENCE
+ ),
+ "variable_start_string": template_args.get(
+ "variable_start_string", None
+ ),
+ "variable_end_string": template_args.get(
+ "variable_end_string", None
+ ),
+ "block_start_string": template_args.get("block_start_string", None),
+ "block_end_string": template_args.get("block_end_string", None),
+ }
+ )
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+ return template_param
+
+ def import_jinja2_lstrip(self, templates):
+ # Option `lstrip_blocks' was added in Jinja2 version 2.7.
+ if any(tmp["lstrip_blocks"] for tmp in templates):
+ try:
+ import jinja2.defaults
+ except ImportError:
+ raise AnsibleError(
+ "Unable to import Jinja2 defaults for determining Jinja2 features."
+ )
+
+ try:
+ jinja2.defaults.LSTRIP_BLOCKS
+ except AttributeError:
+ raise AnsibleError(
+ "Option `lstrip_blocks' is only available in Jinja2 versions >=2.7"
+ )
+
+ def load_template(self, template, new_module_args, task_vars):
+ # template is only supported by k8s module.
+ if self._task.action not in (
+ "k8s",
+ "kubernetes.core.k8s",
+ "community.okd.k8s",
+ "redhat.openshift.k8s",
+ "community.kubernetes.k8s",
+ "openshift_adm_groups_sync",
+ "community.okd.openshift_adm_groups_sync",
+ "redhat.openshift.openshift_adm_groups_sync",
+ ):
+ raise AnsibleActionFail(
+ "'template' is only a supported parameter for the 'k8s' module."
+ )
+
+ omit_value = task_vars.get("omit")
+ template_params = []
+ if isinstance(template, string_types) or isinstance(template, dict):
+ template_params.append(self.get_template_args(template))
+ elif isinstance(template, list):
+ for element in template:
+ template_params.append(self.get_template_args(element))
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+
+ self.import_jinja2_lstrip(template_params)
+
+ wrong_sequences = ["\\n", "\\r", "\\r\\n"]
+ allowed_sequences = ["\n", "\r", "\r\n"]
+
+ result_template = []
+ old_vars = self._templar.available_variables
+
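+ # Snapshot the templar's current Jinja2 environment settings; each template
+ # item overrides them where it provides a value and falls back to these
+ # defaults otherwise.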
+ default_environment = {}
+ for key in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ "trim_blocks",
+ "lstrip_blocks",
+ ):
+ if hasattr(self._templar.environment, key):
+ default_environment[key] = getattr(self._templar.environment, key)
+ for template_item in template_params:
+ # We need to convert unescaped sequences to proper escaped sequences for Jinja2
+ newline_sequence = template_item["newline_sequence"]
+ if newline_sequence in wrong_sequences:
+ template_item["newline_sequence"] = allowed_sequences[
+ wrong_sequences.index(newline_sequence)
+ ]
+ elif newline_sequence not in allowed_sequences:
+ raise AnsibleActionFail(
+ "newline_sequence needs to be one of: \n, \r or \r\n"
+ )
+
+ # template the source data locally & get ready to transfer
+ with self.get_template_data(template_item["path"]) as template_data:
+ # add ansible 'template' vars
+ temp_vars = copy.deepcopy(task_vars)
+ for key, value in iteritems(template_item):
+ if hasattr(self._templar.environment, key):
+ if value is not None:
+ setattr(self._templar.environment, key, value)
+ else:
+ setattr(
+ self._templar.environment,
+ key,
+ default_environment.get(key),
+ )
+ self._templar.available_variables = temp_vars
+ result = self._templar.do_template(
+ template_data,
+ preserve_trailing_newlines=True,
+ escape_backslashes=False,
+ )
+ if omit_value is not None:
+ result_template.extend(RemoveOmit(result, omit_value).output())
+ else:
+ result_template.append(result)
+ self._templar.available_variables = old_vars
+ resource_definition = self._task.args.get("definition", None)
+ if not resource_definition:
+ new_module_args.pop("template")
+ new_module_args["definition"] = result_template
+
+ def get_file_realpath(self, local_path):
+ # local_path is only supported by k8s_cp module.
+ if self._task.action not in (
+ "k8s_cp",
+ "kubernetes.core.k8s_cp",
+ "community.kubernetes.k8s_cp",
+ ):
+ raise AnsibleActionFail(
+ "'local_path' is only supported parameter for 'k8s_cp' module."
+ )
+
+ if os.path.exists(local_path):
+ return local_path
+
+ try:
+ # find in expected paths
+ return self._find_needle("files", local_path)
+ except AnsibleError:
+ raise AnsibleActionFail(
+ "%s does not exist in local filesystem" % local_path
+ )
+
+ def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
+ if isinstance(kubeconfig, string_types):
+ # find the kubeconfig in the expected search path
+ if not remote_transport:
+ # kubeconfig is local
+ # find in expected paths
+ kubeconfig = self._find_needle("files", kubeconfig)
+
+ # decrypt kubeconfig found
+ actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
+ new_module_args["kubeconfig"] = actual_file
+
+ elif isinstance(kubeconfig, dict):
+ new_module_args["kubeconfig"] = kubeconfig
+ else:
+ raise AnsibleActionFail(
+ "Error while reading kubeconfig parameter - "
+ "a string or dict expected, but got %s instead" % type(kubeconfig)
+ )
+
+ def run(self, tmp=None, task_vars=None):
+ """handler for k8s options"""
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+ # Check the current transport connection and, depending on it, decide
+ # where to look for the kubeconfig and src files:
+ # 'local' => look for files on the Ansible controller
+ # any other transport => look for files on the remote node
+ remote_transport = self._connection.transport != "local"
+
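+ # Work on a copy of the task arguments so kubeconfig, src, template and
+ # local_path can be adjusted below without mutating the original task args.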
+ new_module_args = copy.deepcopy(self._task.args)
+
+ kubeconfig = self._task.args.get("kubeconfig", None)
+ if kubeconfig:
+ try:
+ self.get_kubeconfig(kubeconfig, remote_transport, new_module_args)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ # find the file in the expected search path
+ src = self._task.args.get("src", None)
+
+ if src:
+ if remote_transport:
+ # src is on remote node
+ result.update(
+ self._execute_module(
+ module_name=self._task.action, task_vars=task_vars
+ )
+ )
+ return self._ensure_invocation(result)
+
+ # src is local
+ try:
+ # find in expected paths
+ src = self._find_needle("files", src)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ if src:
+ new_module_args["src"] = src
+
+ template = self._task.args.get("template", None)
+ if template:
+ self.load_template(template, new_module_args, task_vars)
+
+ local_path = self._task.args.get("local_path")
+ state = self._task.args.get("state", None)
+ if local_path and state == "to_pod" and not remote_transport:
+ new_module_args["local_path"] = self.get_file_realpath(local_path)
+
+ # Execute the k8s_* module.
+ module_return = self._execute_module(
+ module_name=self._task.action,
+ module_args=new_module_args,
+ task_vars=task_vars,
+ )
+
+ # Delete tmp path
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+
+ result.update(module_return)
+
+ return self._ensure_invocation(result)
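As an aside for reviewers, the following is a minimal standalone sketch (not part of the patch) of the branching that get_kubeconfig() above applies to the kubeconfig task argument: a string is resolved on the controller and vault-decrypted, an inline dict is forwarded unchanged, and anything else is rejected. resolve_path and decrypt_file are hypothetical stand-ins for _find_needle() and _loader.get_real_file().

def normalize_kubeconfig(kubeconfig, remote_transport, resolve_path, decrypt_file):
    # String: treat as a path; only search controller-side paths for local transport.
    if isinstance(kubeconfig, str):
        if not remote_transport:
            kubeconfig = resolve_path(kubeconfig)
        return decrypt_file(kubeconfig)
    # Dict: inline kubeconfig content, passed to the module verbatim.
    if isinstance(kubeconfig, dict):
        return kubeconfig
    raise TypeError("kubeconfig must be a string or dict, got %s" % type(kubeconfig))

# Example: an inline mapping is returned untouched.
assert normalize_kubeconfig({"clusters": []}, True, None, None) == {"clusters": []}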
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_cp.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_cp.py
deleted file mode 120000
index 667980e..0000000
--- a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_cp.py
+++ /dev/null
@@ -1 +0,0 @@
-k8s_info.py
\ No newline at end of file
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_cp.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_cp.py
new file mode 100644
index 0000000..cfcbb28
--- /dev/null
+++ b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_cp.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2012-2014, Michael DeHaan
+# Copyright (c) 2017, Toshio Kuratomi
+# Copyright (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+import traceback
+import os
+from contextlib import contextmanager
+
+from ansible.config.manager import ensure_type
+from ansible.errors import (
+ AnsibleError,
+ AnsibleFileNotFound,
+ AnsibleAction,
+ AnsibleActionFail,
+)
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.module_utils.six import string_types, iteritems
+from ansible.module_utils._text import to_text, to_bytes, to_native
+from ansible.plugins.action import ActionBase
+
+
+class RemoveOmit(object):
+ def __init__(self, buffer, omit_value):
+ try:
+ import yaml
+ except ImportError:
+ raise AnsibleError("Failed to import the required Python library (PyYAML).")
+ self.data = yaml.safe_load_all(buffer)
+ self.omit = omit_value
+
+ def remove_omit(self, data):
+ if isinstance(data, dict):
+ result = dict()
+ for key, value in iteritems(data):
+ if value == self.omit:
+ continue
+ result[key] = self.remove_omit(value)
+ return result
+ if isinstance(data, list):
+ return [self.remove_omit(v) for v in data if v != self.omit]
+ return data
+
+ def output(self):
+ return [self.remove_omit(d) for d in self.data]
+
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = True
+ DEFAULT_NEWLINE_SEQUENCE = "\n"
+
+ def _ensure_invocation(self, result):
+ # NOTE: adding invocation arguments here needs to be kept in sync with
+ # any no_log specified in the argument_spec in the module.
+ if "invocation" not in result:
+ if self._play_context.no_log:
+ result["invocation"] = "CENSORED: no_log is set"
+ else:
+ result["invocation"] = self._task.args.copy()
+ result["invocation"]["module_args"] = self._task.args.copy()
+
+ return result
+
+ @contextmanager
+ def get_template_data(self, template_path):
+ try:
+ source = self._find_needle("templates", template_path)
+ except AnsibleError as e:
+ raise AnsibleActionFail(to_text(e))
+
+ # Get vault decrypted tmp file
+ try:
+ tmp_source = self._loader.get_real_file(source)
+ except AnsibleFileNotFound as e:
+ raise AnsibleActionFail(
+ "could not find template=%s, %s" % (source, to_text(e))
+ )
+ b_tmp_source = to_bytes(tmp_source, errors="surrogate_or_strict")
+
+ try:
+ with open(b_tmp_source, "rb") as f:
+ try:
+ template_data = to_text(f.read(), errors="surrogate_or_strict")
+ except UnicodeError:
+ raise AnsibleActionFail(
+ "Template source files must be utf-8 encoded"
+ )
+ yield template_data
+ except AnsibleAction:
+ raise
+ except Exception as e:
+ raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
+ finally:
+ self._loader.cleanup_tmp_file(b_tmp_source)
+
+ def get_template_args(self, template):
+ template_param = {
+ "newline_sequence": self.DEFAULT_NEWLINE_SEQUENCE,
+ "variable_start_string": None,
+ "variable_end_string": None,
+ "block_start_string": None,
+ "block_end_string": None,
+ "trim_blocks": True,
+ "lstrip_blocks": False,
+ }
+ if isinstance(template, string_types):
+ # treat this as raw_params
+ template_param["path"] = template
+ elif isinstance(template, dict):
+ template_args = template
+ template_path = template_args.get("path", None)
+ if not template_path:
+ raise AnsibleActionFail("Please specify path for template.")
+ template_param["path"] = template_path
+
+ # Validate the string-type template options
+ for s_type in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ ):
+ if s_type in template_args:
+ value = ensure_type(template_args[s_type], "string")
+ if value is not None and not isinstance(value, string_types):
+ raise AnsibleActionFail(
+ "%s is expected to be a string, but got %s instead"
+ % (s_type, type(value))
+ )
+ try:
+ template_param.update(
+ {
+ "trim_blocks": boolean(
+ template_args.get("trim_blocks", True), strict=False
+ ),
+ "lstrip_blocks": boolean(
+ template_args.get("lstrip_blocks", False), strict=False
+ ),
+ }
+ )
+ except TypeError as e:
+ raise AnsibleActionFail(to_native(e))
+
+ template_param.update(
+ {
+ "newline_sequence": template_args.get(
+ "newline_sequence", self.DEFAULT_NEWLINE_SEQUENCE
+ ),
+ "variable_start_string": template_args.get(
+ "variable_start_string", None
+ ),
+ "variable_end_string": template_args.get(
+ "variable_end_string", None
+ ),
+ "block_start_string": template_args.get("block_start_string", None),
+ "block_end_string": template_args.get("block_end_string", None),
+ }
+ )
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+ return template_param
+
+ def import_jinja2_lstrip(self, templates):
+ # Option `lstrip_blocks' was added in Jinja2 version 2.7.
+ if any(tmp["lstrip_blocks"] for tmp in templates):
+ try:
+ import jinja2.defaults
+ except ImportError:
+ raise AnsibleError(
+ "Unable to import Jinja2 defaults for determining Jinja2 features."
+ )
+
+ try:
+ jinja2.defaults.LSTRIP_BLOCKS
+ except AttributeError:
+ raise AnsibleError(
+ "Option `lstrip_blocks' is only available in Jinja2 versions >=2.7"
+ )
+
+ def load_template(self, template, new_module_args, task_vars):
+ # template is only supported by k8s module.
+ if self._task.action not in (
+ "k8s",
+ "kubernetes.core.k8s",
+ "community.okd.k8s",
+ "redhat.openshift.k8s",
+ "community.kubernetes.k8s",
+ "openshift_adm_groups_sync",
+ "community.okd.openshift_adm_groups_sync",
+ "redhat.openshift.openshift_adm_groups_sync",
+ ):
+ raise AnsibleActionFail(
+ "'template' is only a supported parameter for the 'k8s' module."
+ )
+
+ omit_value = task_vars.get("omit")
+ template_params = []
+ if isinstance(template, string_types) or isinstance(template, dict):
+ template_params.append(self.get_template_args(template))
+ elif isinstance(template, list):
+ for element in template:
+ template_params.append(self.get_template_args(element))
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+
+ self.import_jinja2_lstrip(template_params)
+
+ wrong_sequences = ["\\n", "\\r", "\\r\\n"]
+ allowed_sequences = ["\n", "\r", "\r\n"]
+
+ result_template = []
+ old_vars = self._templar.available_variables
+
+ default_environment = {}
+ for key in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ "trim_blocks",
+ "lstrip_blocks",
+ ):
+ if hasattr(self._templar.environment, key):
+ default_environment[key] = getattr(self._templar.environment, key)
+ for template_item in template_params:
+ # Convert escaped newline sequences (a literal backslash followed by n, r, etc.) into the real control characters Jinja2 expects
+ newline_sequence = template_item["newline_sequence"]
+ if newline_sequence in wrong_sequences:
+ template_item["newline_sequence"] = allowed_sequences[
+ wrong_sequences.index(newline_sequence)
+ ]
+ elif newline_sequence not in allowed_sequences:
+ raise AnsibleActionFail(
+ "newline_sequence needs to be one of: \n, \r or \r\n"
+ )
+
+ # template the source data locally & get ready to transfer
+ with self.get_template_data(template_item["path"]) as template_data:
+ # add ansible 'template' vars
+ temp_vars = copy.deepcopy(task_vars)
+ for key, value in iteritems(template_item):
+ if hasattr(self._templar.environment, key):
+ if value is not None:
+ setattr(self._templar.environment, key, value)
+ else:
+ setattr(
+ self._templar.environment,
+ key,
+ default_environment.get(key),
+ )
+ self._templar.available_variables = temp_vars
+ result = self._templar.do_template(
+ template_data,
+ preserve_trailing_newlines=True,
+ escape_backslashes=False,
+ )
+ if omit_value is not None:
+ result_template.extend(RemoveOmit(result, omit_value).output())
+ else:
+ result_template.append(result)
+ self._templar.available_variables = old_vars
+ resource_definition = self._task.args.get("definition", None)
+ if not resource_definition:
+ new_module_args.pop("template")
+ new_module_args["definition"] = result_template
+
+ def get_file_realpath(self, local_path):
+ # local_path is only supported by k8s_cp module.
+ if self._task.action not in (
+ "k8s_cp",
+ "kubernetes.core.k8s_cp",
+ "community.kubernetes.k8s_cp",
+ ):
+ raise AnsibleActionFail(
+ "'local_path' is only supported parameter for 'k8s_cp' module."
+ )
+
+ if os.path.exists(local_path):
+ return local_path
+
+ try:
+ # find in expected paths
+ return self._find_needle("files", local_path)
+ except AnsibleError:
+ raise AnsibleActionFail(
+ "%s does not exist in local filesystem" % local_path
+ )
+
+ def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
+ if isinstance(kubeconfig, string_types):
+ # find the kubeconfig in the expected search path
+ if not remote_transport:
+ # kubeconfig is local
+ # find in expected paths
+ kubeconfig = self._find_needle("files", kubeconfig)
+
+ # decrypt kubeconfig found
+ actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
+ new_module_args["kubeconfig"] = actual_file
+
+ elif isinstance(kubeconfig, dict):
+ new_module_args["kubeconfig"] = kubeconfig
+ else:
+ raise AnsibleActionFail(
+ "Error while reading kubeconfig parameter - "
+ "a string or dict expected, but got %s instead" % type(kubeconfig)
+ )
+
+ def run(self, tmp=None, task_vars=None):
+ """handler for k8s options"""
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+ # Check the current transport connection and, depending on it,
+ # look for kubeconfig and src:
+ # 'local' => look for files on the Ansible controller
+ # any other transport => look for files on the remote node
+ remote_transport = self._connection.transport != "local"
+
+ new_module_args = copy.deepcopy(self._task.args)
+
+ kubeconfig = self._task.args.get("kubeconfig", None)
+ if kubeconfig:
+ try:
+ self.get_kubeconfig(kubeconfig, remote_transport, new_module_args)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ # find the file in the expected search path
+ src = self._task.args.get("src", None)
+
+ if src:
+ if remote_transport:
+ # src is on remote node
+ result.update(
+ self._execute_module(
+ module_name=self._task.action, task_vars=task_vars
+ )
+ )
+ return self._ensure_invocation(result)
+
+ # src is local
+ try:
+ # find in expected paths
+ src = self._find_needle("files", src)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ if src:
+ new_module_args["src"] = src
+
+ template = self._task.args.get("template", None)
+ if template:
+ self.load_template(template, new_module_args, task_vars)
+
+ local_path = self._task.args.get("local_path")
+ state = self._task.args.get("state", None)
+ if local_path and state == "to_pod" and not remote_transport:
+ new_module_args["local_path"] = self.get_file_realpath(local_path)
+
+ # Execute the k8s_* module.
+ module_return = self._execute_module(
+ module_name=self._task.action,
+ module_args=new_module_args,
+ task_vars=task_vars,
+ )
+
+ # Delete tmp path
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+
+ result.update(module_return)
+
+ return self._ensure_invocation(result)
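To make the RemoveOmit helper above easier to follow, here is a standalone sketch (not part of the patch) of its effect on already-parsed YAML documents; the "__omit__" marker below is only a placeholder for Ansible's real omit token.

def drop_omitted(data, omit_value):
    # Recursively drop any dict value or list element equal to the omit marker.
    if isinstance(data, dict):
        return {k: drop_omitted(v, omit_value)
                for k, v in data.items() if v != omit_value}
    if isinstance(data, list):
        return [drop_omitted(v, omit_value) for v in data if v != omit_value]
    return data

# The 'labels' key disappears because its value is the omit marker.
print(drop_omitted({"metadata": {"name": "demo", "labels": "__omit__"}}, "__omit__"))
# -> {'metadata': {'name': 'demo'}}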
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_drain.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_drain.py
deleted file mode 120000
index 667980e..0000000
--- a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_drain.py
+++ /dev/null
@@ -1 +0,0 @@
-k8s_info.py
\ No newline at end of file
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_drain.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_drain.py
new file mode 100644
index 0000000..cfcbb28
--- /dev/null
+++ b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_drain.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2012-2014, Michael DeHaan
+# Copyright (c) 2017, Toshio Kuratomi
+# Copyright (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+import traceback
+import os
+from contextlib import contextmanager
+
+from ansible.config.manager import ensure_type
+from ansible.errors import (
+ AnsibleError,
+ AnsibleFileNotFound,
+ AnsibleAction,
+ AnsibleActionFail,
+)
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.module_utils.six import string_types, iteritems
+from ansible.module_utils._text import to_text, to_bytes, to_native
+from ansible.plugins.action import ActionBase
+
+
+class RemoveOmit(object):
+ def __init__(self, buffer, omit_value):
+ try:
+ import yaml
+ except ImportError:
+ raise AnsibleError("Failed to import the required Python library (PyYAML).")
+ self.data = yaml.safe_load_all(buffer)
+ self.omit = omit_value
+
+ def remove_omit(self, data):
+ if isinstance(data, dict):
+ result = dict()
+ for key, value in iteritems(data):
+ if value == self.omit:
+ continue
+ result[key] = self.remove_omit(value)
+ return result
+ if isinstance(data, list):
+ return [self.remove_omit(v) for v in data if v != self.omit]
+ return data
+
+ def output(self):
+ return [self.remove_omit(d) for d in self.data]
+
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = True
+ DEFAULT_NEWLINE_SEQUENCE = "\n"
+
+ def _ensure_invocation(self, result):
+ # NOTE: adding invocation arguments here needs to be kept in sync with
+ # any no_log specified in the argument_spec in the module.
+ if "invocation" not in result:
+ if self._play_context.no_log:
+ result["invocation"] = "CENSORED: no_log is set"
+ else:
+ result["invocation"] = self._task.args.copy()
+ result["invocation"]["module_args"] = self._task.args.copy()
+
+ return result
+
+ @contextmanager
+ def get_template_data(self, template_path):
+ try:
+ source = self._find_needle("templates", template_path)
+ except AnsibleError as e:
+ raise AnsibleActionFail(to_text(e))
+
+ # Get vault decrypted tmp file
+ try:
+ tmp_source = self._loader.get_real_file(source)
+ except AnsibleFileNotFound as e:
+ raise AnsibleActionFail(
+ "could not find template=%s, %s" % (source, to_text(e))
+ )
+ b_tmp_source = to_bytes(tmp_source, errors="surrogate_or_strict")
+
+ try:
+ with open(b_tmp_source, "rb") as f:
+ try:
+ template_data = to_text(f.read(), errors="surrogate_or_strict")
+ except UnicodeError:
+ raise AnsibleActionFail(
+ "Template source files must be utf-8 encoded"
+ )
+ yield template_data
+ except AnsibleAction:
+ raise
+ except Exception as e:
+ raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
+ finally:
+ self._loader.cleanup_tmp_file(b_tmp_source)
+
+ def get_template_args(self, template):
+ template_param = {
+ "newline_sequence": self.DEFAULT_NEWLINE_SEQUENCE,
+ "variable_start_string": None,
+ "variable_end_string": None,
+ "block_start_string": None,
+ "block_end_string": None,
+ "trim_blocks": True,
+ "lstrip_blocks": False,
+ }
+ if isinstance(template, string_types):
+ # treat this as raw_params
+ template_param["path"] = template
+ elif isinstance(template, dict):
+ template_args = template
+ template_path = template_args.get("path", None)
+ if not template_path:
+ raise AnsibleActionFail("Please specify path for template.")
+ template_param["path"] = template_path
+
+ # Validate the string-type template options
+ for s_type in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ ):
+ if s_type in template_args:
+ value = ensure_type(template_args[s_type], "string")
+ if value is not None and not isinstance(value, string_types):
+ raise AnsibleActionFail(
+ "%s is expected to be a string, but got %s instead"
+ % (s_type, type(value))
+ )
+ try:
+ template_param.update(
+ {
+ "trim_blocks": boolean(
+ template_args.get("trim_blocks", True), strict=False
+ ),
+ "lstrip_blocks": boolean(
+ template_args.get("lstrip_blocks", False), strict=False
+ ),
+ }
+ )
+ except TypeError as e:
+ raise AnsibleActionFail(to_native(e))
+
+ template_param.update(
+ {
+ "newline_sequence": template_args.get(
+ "newline_sequence", self.DEFAULT_NEWLINE_SEQUENCE
+ ),
+ "variable_start_string": template_args.get(
+ "variable_start_string", None
+ ),
+ "variable_end_string": template_args.get(
+ "variable_end_string", None
+ ),
+ "block_start_string": template_args.get("block_start_string", None),
+ "block_end_string": template_args.get("block_end_string", None),
+ }
+ )
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+ return template_param
+
+ def import_jinja2_lstrip(self, templates):
+ # Option `lstrip_blocks' was added in Jinja2 version 2.7.
+ if any(tmp["lstrip_blocks"] for tmp in templates):
+ try:
+ import jinja2.defaults
+ except ImportError:
+ raise AnsibleError(
+ "Unable to import Jinja2 defaults for determining Jinja2 features."
+ )
+
+ try:
+ jinja2.defaults.LSTRIP_BLOCKS
+ except AttributeError:
+ raise AnsibleError(
+ "Option `lstrip_blocks' is only available in Jinja2 versions >=2.7"
+ )
+
+ def load_template(self, template, new_module_args, task_vars):
+ # template is only supported by k8s module.
+ if self._task.action not in (
+ "k8s",
+ "kubernetes.core.k8s",
+ "community.okd.k8s",
+ "redhat.openshift.k8s",
+ "community.kubernetes.k8s",
+ "openshift_adm_groups_sync",
+ "community.okd.openshift_adm_groups_sync",
+ "redhat.openshift.openshift_adm_groups_sync",
+ ):
+ raise AnsibleActionFail(
+ "'template' is only a supported parameter for the 'k8s' module."
+ )
+
+ omit_value = task_vars.get("omit")
+ template_params = []
+ if isinstance(template, string_types) or isinstance(template, dict):
+ template_params.append(self.get_template_args(template))
+ elif isinstance(template, list):
+ for element in template:
+ template_params.append(self.get_template_args(element))
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+
+ self.import_jinja2_lstrip(template_params)
+
+ wrong_sequences = ["\\n", "\\r", "\\r\\n"]
+ allowed_sequences = ["\n", "\r", "\r\n"]
+
+ result_template = []
+ old_vars = self._templar.available_variables
+
+ default_environment = {}
+ for key in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ "trim_blocks",
+ "lstrip_blocks",
+ ):
+ if hasattr(self._templar.environment, key):
+ default_environment[key] = getattr(self._templar.environment, key)
+ for template_item in template_params:
+ # Convert escaped newline sequences (a literal backslash followed by n, r, etc.) into the real control characters Jinja2 expects
+ newline_sequence = template_item["newline_sequence"]
+ if newline_sequence in wrong_sequences:
+ template_item["newline_sequence"] = allowed_sequences[
+ wrong_sequences.index(newline_sequence)
+ ]
+ elif newline_sequence not in allowed_sequences:
+ raise AnsibleActionFail(
+ "newline_sequence needs to be one of: \n, \r or \r\n"
+ )
+
+ # template the source data locally & get ready to transfer
+ with self.get_template_data(template_item["path"]) as template_data:
+ # add ansible 'template' vars
+ temp_vars = copy.deepcopy(task_vars)
+ for key, value in iteritems(template_item):
+ if hasattr(self._templar.environment, key):
+ if value is not None:
+ setattr(self._templar.environment, key, value)
+ else:
+ setattr(
+ self._templar.environment,
+ key,
+ default_environment.get(key),
+ )
+ self._templar.available_variables = temp_vars
+ result = self._templar.do_template(
+ template_data,
+ preserve_trailing_newlines=True,
+ escape_backslashes=False,
+ )
+ if omit_value is not None:
+ result_template.extend(RemoveOmit(result, omit_value).output())
+ else:
+ result_template.append(result)
+ self._templar.available_variables = old_vars
+ resource_definition = self._task.args.get("definition", None)
+ if not resource_definition:
+ new_module_args.pop("template")
+ new_module_args["definition"] = result_template
+
+ def get_file_realpath(self, local_path):
+ # local_path is only supported by k8s_cp module.
+ if self._task.action not in (
+ "k8s_cp",
+ "kubernetes.core.k8s_cp",
+ "community.kubernetes.k8s_cp",
+ ):
+ raise AnsibleActionFail(
+ "'local_path' is only supported parameter for 'k8s_cp' module."
+ )
+
+ if os.path.exists(local_path):
+ return local_path
+
+ try:
+ # find in expected paths
+ return self._find_needle("files", local_path)
+ except AnsibleError:
+ raise AnsibleActionFail(
+ "%s does not exist in local filesystem" % local_path
+ )
+
+ def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
+ if isinstance(kubeconfig, string_types):
+ # find the kubeconfig in the expected search path
+ if not remote_transport:
+ # kubeconfig is local
+ # find in expected paths
+ kubeconfig = self._find_needle("files", kubeconfig)
+
+ # decrypt kubeconfig found
+ actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
+ new_module_args["kubeconfig"] = actual_file
+
+ elif isinstance(kubeconfig, dict):
+ new_module_args["kubeconfig"] = kubeconfig
+ else:
+ raise AnsibleActionFail(
+ "Error while reading kubeconfig parameter - "
+ "a string or dict expected, but got %s instead" % type(kubeconfig)
+ )
+
+ def run(self, tmp=None, task_vars=None):
+ """handler for k8s options"""
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+ # Check the current transport connection and, depending on it,
+ # look for kubeconfig and src:
+ # 'local' => look for files on the Ansible controller
+ # any other transport => look for files on the remote node
+ remote_transport = self._connection.transport != "local"
+
+ new_module_args = copy.deepcopy(self._task.args)
+
+ kubeconfig = self._task.args.get("kubeconfig", None)
+ if kubeconfig:
+ try:
+ self.get_kubeconfig(kubeconfig, remote_transport, new_module_args)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ # find the file in the expected search path
+ src = self._task.args.get("src", None)
+
+ if src:
+ if remote_transport:
+ # src is on remote node
+ result.update(
+ self._execute_module(
+ module_name=self._task.action, task_vars=task_vars
+ )
+ )
+ return self._ensure_invocation(result)
+
+ # src is local
+ try:
+ # find in expected paths
+ src = self._find_needle("files", src)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ if src:
+ new_module_args["src"] = src
+
+ template = self._task.args.get("template", None)
+ if template:
+ self.load_template(template, new_module_args, task_vars)
+
+ local_path = self._task.args.get("local_path")
+ state = self._task.args.get("state", None)
+ if local_path and state == "to_pod" and not remote_transport:
+ new_module_args["local_path"] = self.get_file_realpath(local_path)
+
+ # Execute the k8s_* module.
+ module_return = self._execute_module(
+ module_name=self._task.action,
+ module_args=new_module_args,
+ task_vars=task_vars,
+ )
+
+ # Delete tmp path
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+
+ result.update(module_return)
+
+ return self._ensure_invocation(result)
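The newline_sequence handling in load_template() above is easy to misread, so here is a standalone sketch (not part of the patch) of the same mapping: the escaped forms a user might pass are translated to the real control characters, and anything else is rejected.

WRONG_SEQUENCES = ["\\n", "\\r", "\\r\\n"]      # literal backslash forms
ALLOWED_SEQUENCES = ["\n", "\r", "\r\n"]        # real control characters

def normalize_newline_sequence(seq):
    if seq in WRONG_SEQUENCES:
        return ALLOWED_SEQUENCES[WRONG_SEQUENCES.index(seq)]
    if seq in ALLOWED_SEQUENCES:
        return seq
    raise ValueError("newline_sequence needs to be one of: \\n, \\r or \\r\\n")

assert normalize_newline_sequence("\\r\\n") == "\r\n"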
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_exec.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_exec.py
deleted file mode 120000
index 667980e..0000000
--- a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_exec.py
+++ /dev/null
@@ -1 +0,0 @@
-k8s_info.py
\ No newline at end of file
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_exec.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_exec.py
new file mode 100644
index 0000000..cfcbb28
--- /dev/null
+++ b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_exec.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2012-2014, Michael DeHaan
+# Copyright (c) 2017, Toshio Kuratomi
+# Copyright (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+import traceback
+import os
+from contextlib import contextmanager
+
+from ansible.config.manager import ensure_type
+from ansible.errors import (
+ AnsibleError,
+ AnsibleFileNotFound,
+ AnsibleAction,
+ AnsibleActionFail,
+)
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.module_utils.six import string_types, iteritems
+from ansible.module_utils._text import to_text, to_bytes, to_native
+from ansible.plugins.action import ActionBase
+
+
+class RemoveOmit(object):
+ def __init__(self, buffer, omit_value):
+ try:
+ import yaml
+ except ImportError:
+ raise AnsibleError("Failed to import the required Python library (PyYAML).")
+ self.data = yaml.safe_load_all(buffer)
+ self.omit = omit_value
+
+ def remove_omit(self, data):
+ if isinstance(data, dict):
+ result = dict()
+ for key, value in iteritems(data):
+ if value == self.omit:
+ continue
+ result[key] = self.remove_omit(value)
+ return result
+ if isinstance(data, list):
+ return [self.remove_omit(v) for v in data if v != self.omit]
+ return data
+
+ def output(self):
+ return [self.remove_omit(d) for d in self.data]
+
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = True
+ DEFAULT_NEWLINE_SEQUENCE = "\n"
+
+ def _ensure_invocation(self, result):
+ # NOTE: adding invocation arguments here needs to be kept in sync with
+ # any no_log specified in the argument_spec in the module.
+ if "invocation" not in result:
+ if self._play_context.no_log:
+ result["invocation"] = "CENSORED: no_log is set"
+ else:
+ result["invocation"] = self._task.args.copy()
+ result["invocation"]["module_args"] = self._task.args.copy()
+
+ return result
+
+ @contextmanager
+ def get_template_data(self, template_path):
+ try:
+ source = self._find_needle("templates", template_path)
+ except AnsibleError as e:
+ raise AnsibleActionFail(to_text(e))
+
+ # Get vault decrypted tmp file
+ try:
+ tmp_source = self._loader.get_real_file(source)
+ except AnsibleFileNotFound as e:
+ raise AnsibleActionFail(
+ "could not find template=%s, %s" % (source, to_text(e))
+ )
+ b_tmp_source = to_bytes(tmp_source, errors="surrogate_or_strict")
+
+ try:
+ with open(b_tmp_source, "rb") as f:
+ try:
+ template_data = to_text(f.read(), errors="surrogate_or_strict")
+ except UnicodeError:
+ raise AnsibleActionFail(
+ "Template source files must be utf-8 encoded"
+ )
+ yield template_data
+ except AnsibleAction:
+ raise
+ except Exception as e:
+ raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
+ finally:
+ self._loader.cleanup_tmp_file(b_tmp_source)
+
+ def get_template_args(self, template):
+ template_param = {
+ "newline_sequence": self.DEFAULT_NEWLINE_SEQUENCE,
+ "variable_start_string": None,
+ "variable_end_string": None,
+ "block_start_string": None,
+ "block_end_string": None,
+ "trim_blocks": True,
+ "lstrip_blocks": False,
+ }
+ if isinstance(template, string_types):
+ # treat this as raw_params
+ template_param["path"] = template
+ elif isinstance(template, dict):
+ template_args = template
+ template_path = template_args.get("path", None)
+ if not template_path:
+ raise AnsibleActionFail("Please specify path for template.")
+ template_param["path"] = template_path
+
+ # Validate the string-type template options
+ for s_type in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ ):
+ if s_type in template_args:
+ value = ensure_type(template_args[s_type], "string")
+ if value is not None and not isinstance(value, string_types):
+ raise AnsibleActionFail(
+ "%s is expected to be a string, but got %s instead"
+ % (s_type, type(value))
+ )
+ try:
+ template_param.update(
+ {
+ "trim_blocks": boolean(
+ template_args.get("trim_blocks", True), strict=False
+ ),
+ "lstrip_blocks": boolean(
+ template_args.get("lstrip_blocks", False), strict=False
+ ),
+ }
+ )
+ except TypeError as e:
+ raise AnsibleActionFail(to_native(e))
+
+ template_param.update(
+ {
+ "newline_sequence": template_args.get(
+ "newline_sequence", self.DEFAULT_NEWLINE_SEQUENCE
+ ),
+ "variable_start_string": template_args.get(
+ "variable_start_string", None
+ ),
+ "variable_end_string": template_args.get(
+ "variable_end_string", None
+ ),
+ "block_start_string": template_args.get("block_start_string", None),
+ "block_end_string": template_args.get("block_end_string", None),
+ }
+ )
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+ return template_param
+
+ def import_jinja2_lstrip(self, templates):
+ # Option `lstrip_blocks' was added in Jinja2 version 2.7.
+ if any(tmp["lstrip_blocks"] for tmp in templates):
+ try:
+ import jinja2.defaults
+ except ImportError:
+ raise AnsibleError(
+ "Unable to import Jinja2 defaults for determining Jinja2 features."
+ )
+
+ try:
+ jinja2.defaults.LSTRIP_BLOCKS
+ except AttributeError:
+ raise AnsibleError(
+ "Option `lstrip_blocks' is only available in Jinja2 versions >=2.7"
+ )
+
+ def load_template(self, template, new_module_args, task_vars):
+ # template is only supported by k8s module.
+ if self._task.action not in (
+ "k8s",
+ "kubernetes.core.k8s",
+ "community.okd.k8s",
+ "redhat.openshift.k8s",
+ "community.kubernetes.k8s",
+ "openshift_adm_groups_sync",
+ "community.okd.openshift_adm_groups_sync",
+ "redhat.openshift.openshift_adm_groups_sync",
+ ):
+ raise AnsibleActionFail(
+ "'template' is only a supported parameter for the 'k8s' module."
+ )
+
+ omit_value = task_vars.get("omit")
+ template_params = []
+ if isinstance(template, string_types) or isinstance(template, dict):
+ template_params.append(self.get_template_args(template))
+ elif isinstance(template, list):
+ for element in template:
+ template_params.append(self.get_template_args(element))
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+
+ self.import_jinja2_lstrip(template_params)
+
+ wrong_sequences = ["\\n", "\\r", "\\r\\n"]
+ allowed_sequences = ["\n", "\r", "\r\n"]
+
+ result_template = []
+ old_vars = self._templar.available_variables
+
+ default_environment = {}
+ for key in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ "trim_blocks",
+ "lstrip_blocks",
+ ):
+ if hasattr(self._templar.environment, key):
+ default_environment[key] = getattr(self._templar.environment, key)
+ for template_item in template_params:
+ # Convert escaped newline sequences (a literal backslash followed by n, r, etc.) into the real control characters Jinja2 expects
+ newline_sequence = template_item["newline_sequence"]
+ if newline_sequence in wrong_sequences:
+ template_item["newline_sequence"] = allowed_sequences[
+ wrong_sequences.index(newline_sequence)
+ ]
+ elif newline_sequence not in allowed_sequences:
+ raise AnsibleActionFail(
+ "newline_sequence needs to be one of: \n, \r or \r\n"
+ )
+
+ # template the source data locally & get ready to transfer
+ with self.get_template_data(template_item["path"]) as template_data:
+ # add ansible 'template' vars
+ temp_vars = copy.deepcopy(task_vars)
+ for key, value in iteritems(template_item):
+ if hasattr(self._templar.environment, key):
+ if value is not None:
+ setattr(self._templar.environment, key, value)
+ else:
+ setattr(
+ self._templar.environment,
+ key,
+ default_environment.get(key),
+ )
+ self._templar.available_variables = temp_vars
+ result = self._templar.do_template(
+ template_data,
+ preserve_trailing_newlines=True,
+ escape_backslashes=False,
+ )
+ if omit_value is not None:
+ result_template.extend(RemoveOmit(result, omit_value).output())
+ else:
+ result_template.append(result)
+ self._templar.available_variables = old_vars
+ resource_definition = self._task.args.get("definition", None)
+ if not resource_definition:
+ new_module_args.pop("template")
+ new_module_args["definition"] = result_template
+
+ def get_file_realpath(self, local_path):
+ # local_path is only supported by k8s_cp module.
+ if self._task.action not in (
+ "k8s_cp",
+ "kubernetes.core.k8s_cp",
+ "community.kubernetes.k8s_cp",
+ ):
+ raise AnsibleActionFail(
+ "'local_path' is only supported parameter for 'k8s_cp' module."
+ )
+
+ if os.path.exists(local_path):
+ return local_path
+
+ try:
+ # find in expected paths
+ return self._find_needle("files", local_path)
+ except AnsibleError:
+ raise AnsibleActionFail(
+ "%s does not exist in local filesystem" % local_path
+ )
+
+ def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
+ if isinstance(kubeconfig, string_types):
+ # find the kubeconfig in the expected search path
+ if not remote_transport:
+ # kubeconfig is local
+ # find in expected paths
+ kubeconfig = self._find_needle("files", kubeconfig)
+
+ # decrypt kubeconfig found
+ actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
+ new_module_args["kubeconfig"] = actual_file
+
+ elif isinstance(kubeconfig, dict):
+ new_module_args["kubeconfig"] = kubeconfig
+ else:
+ raise AnsibleActionFail(
+ "Error while reading kubeconfig parameter - "
+ "a string or dict expected, but got %s instead" % type(kubeconfig)
+ )
+
+ def run(self, tmp=None, task_vars=None):
+ """handler for k8s options"""
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+ # Check the current transport connection and, depending on it,
+ # look for kubeconfig and src:
+ # 'local' => look for files on the Ansible controller
+ # any other transport => look for files on the remote node
+ remote_transport = self._connection.transport != "local"
+
+ new_module_args = copy.deepcopy(self._task.args)
+
+ kubeconfig = self._task.args.get("kubeconfig", None)
+ if kubeconfig:
+ try:
+ self.get_kubeconfig(kubeconfig, remote_transport, new_module_args)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ # find the file in the expected search path
+ src = self._task.args.get("src", None)
+
+ if src:
+ if remote_transport:
+ # src is on remote node
+ result.update(
+ self._execute_module(
+ module_name=self._task.action, task_vars=task_vars
+ )
+ )
+ return self._ensure_invocation(result)
+
+ # src is local
+ try:
+ # find in expected paths
+ src = self._find_needle("files", src)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ if src:
+ new_module_args["src"] = src
+
+ template = self._task.args.get("template", None)
+ if template:
+ self.load_template(template, new_module_args, task_vars)
+
+ local_path = self._task.args.get("local_path")
+ state = self._task.args.get("state", None)
+ if local_path and state == "to_pod" and not remote_transport:
+ new_module_args["local_path"] = self.get_file_realpath(local_path)
+
+ # Execute the k8s_* module.
+ module_return = self._execute_module(
+ module_name=self._task.action,
+ module_args=new_module_args,
+ task_vars=task_vars,
+ )
+
+ # Delete tmp path
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+
+ result.update(module_return)
+
+ return self._ensure_invocation(result)
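For orientation, a simplified standalone sketch (not part of the patch) of what get_template_args() above does with the template parameter; delimiter overrides and string validation are omitted here. A bare string becomes a path with default options, and a dict must carry at least "path".

TEMPLATE_DEFAULTS = {
    "newline_sequence": "\n",
    "trim_blocks": True,
    "lstrip_blocks": False,
}

def normalize_template(template):
    if isinstance(template, str):
        return dict(TEMPLATE_DEFAULTS, path=template)
    if isinstance(template, dict):
        if not template.get("path"):
            raise ValueError("Please specify path for template.")
        return dict(TEMPLATE_DEFAULTS, **template)
    raise TypeError("a string or dict for template expected, got %s" % type(template))

print(normalize_template("deployment.yaml.j2"))
# -> {'newline_sequence': '\n', 'trim_blocks': True, 'lstrip_blocks': False,
#     'path': 'deployment.yaml.j2'}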
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_log.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_log.py
deleted file mode 120000
index 667980e..0000000
--- a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_log.py
+++ /dev/null
@@ -1 +0,0 @@
-k8s_info.py
\ No newline at end of file
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_log.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_log.py
new file mode 100644
index 0000000..cfcbb28
--- /dev/null
+++ b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_log.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2012-2014, Michael DeHaan
+# Copyright (c) 2017, Toshio Kuratomi
+# Copyright (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+import traceback
+import os
+from contextlib import contextmanager
+
+from ansible.config.manager import ensure_type
+from ansible.errors import (
+ AnsibleError,
+ AnsibleFileNotFound,
+ AnsibleAction,
+ AnsibleActionFail,
+)
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.module_utils.six import string_types, iteritems
+from ansible.module_utils._text import to_text, to_bytes, to_native
+from ansible.plugins.action import ActionBase
+
+
+class RemoveOmit(object):
+ def __init__(self, buffer, omit_value):
+ try:
+ import yaml
+ except ImportError:
+ raise AnsibleError("Failed to import the required Python library (PyYAML).")
+ self.data = yaml.safe_load_all(buffer)
+ self.omit = omit_value
+
+ def remove_omit(self, data):
+ if isinstance(data, dict):
+ result = dict()
+ for key, value in iteritems(data):
+ if value == self.omit:
+ continue
+ result[key] = self.remove_omit(value)
+ return result
+ if isinstance(data, list):
+ return [self.remove_omit(v) for v in data if v != self.omit]
+ return data
+
+ def output(self):
+ return [self.remove_omit(d) for d in self.data]
+
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = True
+ DEFAULT_NEWLINE_SEQUENCE = "\n"
+
+ def _ensure_invocation(self, result):
+ # NOTE: adding invocation arguments here needs to be kept in sync with
+ # any no_log specified in the argument_spec in the module.
+ if "invocation" not in result:
+ if self._play_context.no_log:
+ result["invocation"] = "CENSORED: no_log is set"
+ else:
+ result["invocation"] = self._task.args.copy()
+ result["invocation"]["module_args"] = self._task.args.copy()
+
+ return result
+
+ @contextmanager
+ def get_template_data(self, template_path):
+ try:
+ source = self._find_needle("templates", template_path)
+ except AnsibleError as e:
+ raise AnsibleActionFail(to_text(e))
+
+ # Get vault decrypted tmp file
+ try:
+ tmp_source = self._loader.get_real_file(source)
+ except AnsibleFileNotFound as e:
+ raise AnsibleActionFail(
+ "could not find template=%s, %s" % (source, to_text(e))
+ )
+ b_tmp_source = to_bytes(tmp_source, errors="surrogate_or_strict")
+
+ try:
+ with open(b_tmp_source, "rb") as f:
+ try:
+ template_data = to_text(f.read(), errors="surrogate_or_strict")
+ except UnicodeError:
+ raise AnsibleActionFail(
+ "Template source files must be utf-8 encoded"
+ )
+ yield template_data
+ except AnsibleAction:
+ raise
+ except Exception as e:
+ raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
+ finally:
+ self._loader.cleanup_tmp_file(b_tmp_source)
+
+ def get_template_args(self, template):
+ template_param = {
+ "newline_sequence": self.DEFAULT_NEWLINE_SEQUENCE,
+ "variable_start_string": None,
+ "variable_end_string": None,
+ "block_start_string": None,
+ "block_end_string": None,
+ "trim_blocks": True,
+ "lstrip_blocks": False,
+ }
+ if isinstance(template, string_types):
+ # treat this as raw_params
+ template_param["path"] = template
+ elif isinstance(template, dict):
+ template_args = template
+ template_path = template_args.get("path", None)
+ if not template_path:
+ raise AnsibleActionFail("Please specify path for template.")
+ template_param["path"] = template_path
+
+ # Validate the string-type template options
+ for s_type in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ ):
+ if s_type in template_args:
+ value = ensure_type(template_args[s_type], "string")
+ if value is not None and not isinstance(value, string_types):
+ raise AnsibleActionFail(
+ "%s is expected to be a string, but got %s instead"
+ % (s_type, type(value))
+ )
+ try:
+ template_param.update(
+ {
+ "trim_blocks": boolean(
+ template_args.get("trim_blocks", True), strict=False
+ ),
+ "lstrip_blocks": boolean(
+ template_args.get("lstrip_blocks", False), strict=False
+ ),
+ }
+ )
+ except TypeError as e:
+ raise AnsibleActionFail(to_native(e))
+
+ template_param.update(
+ {
+ "newline_sequence": template_args.get(
+ "newline_sequence", self.DEFAULT_NEWLINE_SEQUENCE
+ ),
+ "variable_start_string": template_args.get(
+ "variable_start_string", None
+ ),
+ "variable_end_string": template_args.get(
+ "variable_end_string", None
+ ),
+ "block_start_string": template_args.get("block_start_string", None),
+ "block_end_string": template_args.get("block_end_string", None),
+ }
+ )
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+ return template_param
+
+ def import_jinja2_lstrip(self, templates):
+ # Option `lstrip_blocks' was added in Jinja2 version 2.7.
+ if any(tmp["lstrip_blocks"] for tmp in templates):
+ try:
+ import jinja2.defaults
+ except ImportError:
+ raise AnsibleError(
+ "Unable to import Jinja2 defaults for determining Jinja2 features."
+ )
+
+ try:
+ jinja2.defaults.LSTRIP_BLOCKS
+ except AttributeError:
+ raise AnsibleError(
+ "Option `lstrip_blocks' is only available in Jinja2 versions >=2.7"
+ )
+
+ def load_template(self, template, new_module_args, task_vars):
+ # template is only supported by k8s module.
+ if self._task.action not in (
+ "k8s",
+ "kubernetes.core.k8s",
+ "community.okd.k8s",
+ "redhat.openshift.k8s",
+ "community.kubernetes.k8s",
+ "openshift_adm_groups_sync",
+ "community.okd.openshift_adm_groups_sync",
+ "redhat.openshift.openshift_adm_groups_sync",
+ ):
+ raise AnsibleActionFail(
+ "'template' is only a supported parameter for the 'k8s' module."
+ )
+
+ omit_value = task_vars.get("omit")
+ template_params = []
+ if isinstance(template, string_types) or isinstance(template, dict):
+ template_params.append(self.get_template_args(template))
+ elif isinstance(template, list):
+ for element in template:
+ template_params.append(self.get_template_args(element))
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+
+ self.import_jinja2_lstrip(template_params)
+
+ wrong_sequences = ["\\n", "\\r", "\\r\\n"]
+ allowed_sequences = ["\n", "\r", "\r\n"]
+
+ result_template = []
+ old_vars = self._templar.available_variables
+
+ default_environment = {}
+ for key in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ "trim_blocks",
+ "lstrip_blocks",
+ ):
+ if hasattr(self._templar.environment, key):
+ default_environment[key] = getattr(self._templar.environment, key)
+ for template_item in template_params:
+ # Convert escaped newline sequences (a literal backslash followed by n, r, etc.) into the real control characters Jinja2 expects
+ newline_sequence = template_item["newline_sequence"]
+ if newline_sequence in wrong_sequences:
+ template_item["newline_sequence"] = allowed_sequences[
+ wrong_sequences.index(newline_sequence)
+ ]
+ elif newline_sequence not in allowed_sequences:
+ raise AnsibleActionFail(
+ "newline_sequence needs to be one of: \n, \r or \r\n"
+ )
+
+ # template the source data locally & get ready to transfer
+ with self.get_template_data(template_item["path"]) as template_data:
+ # add ansible 'template' vars
+ temp_vars = copy.deepcopy(task_vars)
+ for key, value in iteritems(template_item):
+ if hasattr(self._templar.environment, key):
+ if value is not None:
+ setattr(self._templar.environment, key, value)
+ else:
+ setattr(
+ self._templar.environment,
+ key,
+ default_environment.get(key),
+ )
+ self._templar.available_variables = temp_vars
+ result = self._templar.do_template(
+ template_data,
+ preserve_trailing_newlines=True,
+ escape_backslashes=False,
+ )
+ if omit_value is not None:
+ result_template.extend(RemoveOmit(result, omit_value).output())
+ else:
+ result_template.append(result)
+ self._templar.available_variables = old_vars
+ resource_definition = self._task.args.get("definition", None)
+ if not resource_definition:
+ new_module_args.pop("template")
+ new_module_args["definition"] = result_template
+
+ def get_file_realpath(self, local_path):
+ # local_path is only supported by k8s_cp module.
+ if self._task.action not in (
+ "k8s_cp",
+ "kubernetes.core.k8s_cp",
+ "community.kubernetes.k8s_cp",
+ ):
+ raise AnsibleActionFail(
+ "'local_path' is only supported parameter for 'k8s_cp' module."
+ )
+
+ if os.path.exists(local_path):
+ return local_path
+
+ try:
+ # find in expected paths
+ return self._find_needle("files", local_path)
+ except AnsibleError:
+ raise AnsibleActionFail(
+ "%s does not exist in local filesystem" % local_path
+ )
+
+ def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
+ if isinstance(kubeconfig, string_types):
+ # find the kubeconfig in the expected search path
+ if not remote_transport:
+ # kubeconfig is local
+ # find in expected paths
+ kubeconfig = self._find_needle("files", kubeconfig)
+
+ # decrypt kubeconfig found
+ actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
+ new_module_args["kubeconfig"] = actual_file
+
+ elif isinstance(kubeconfig, dict):
+ new_module_args["kubeconfig"] = kubeconfig
+ else:
+ raise AnsibleActionFail(
+ "Error while reading kubeconfig parameter - "
+ "a string or dict expected, but got %s instead" % type(kubeconfig)
+ )
+
+ def run(self, tmp=None, task_vars=None):
+ """handler for k8s options"""
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+ # Check the current transport connection and, depending on it,
+ # look for kubeconfig and src:
+ # 'local' => look for files on the Ansible controller
+ # any other transport => look for files on the remote node
+ remote_transport = self._connection.transport != "local"
+
+ new_module_args = copy.deepcopy(self._task.args)
+
+ kubeconfig = self._task.args.get("kubeconfig", None)
+ if kubeconfig:
+ try:
+ self.get_kubeconfig(kubeconfig, remote_transport, new_module_args)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ # find the file in the expected search path
+ src = self._task.args.get("src", None)
+
+ if src:
+ if remote_transport:
+ # src is on remote node
+ result.update(
+ self._execute_module(
+ module_name=self._task.action, task_vars=task_vars
+ )
+ )
+ return self._ensure_invocation(result)
+
+ # src is local
+ try:
+ # find in expected paths
+ src = self._find_needle("files", src)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ if src:
+ new_module_args["src"] = src
+
+ template = self._task.args.get("template", None)
+ if template:
+ self.load_template(template, new_module_args, task_vars)
+
+ local_path = self._task.args.get("local_path")
+ state = self._task.args.get("state", None)
+ if local_path and state == "to_pod" and not remote_transport:
+ new_module_args["local_path"] = self.get_file_realpath(local_path)
+
+ # Execute the k8s_* module.
+ module_return = self._execute_module(
+ module_name=self._task.action,
+ module_args=new_module_args,
+ task_vars=task_vars,
+ )
+
+ # Delete tmp path
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+
+ result.update(module_return)
+
+ return self._ensure_invocation(result)
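A standalone sketch (not part of the patch) of the _ensure_invocation() guard above: when no_log is set, the recorded invocation is replaced with a censor marker instead of echoing the raw task arguments back into the result payload.

def ensure_invocation(result, task_args, no_log):
    if "invocation" not in result:
        if no_log:
            result["invocation"] = "CENSORED: no_log is set"
        else:
            result["invocation"] = dict(task_args)
            result["invocation"]["module_args"] = dict(task_args)
    return result

print(ensure_invocation({}, {"namespace": "demo"}, no_log=True))
# -> {'invocation': 'CENSORED: no_log is set'}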
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_rollback.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_rollback.py
deleted file mode 120000
index 667980e..0000000
--- a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_rollback.py
+++ /dev/null
@@ -1 +0,0 @@
-k8s_info.py
\ No newline at end of file
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_rollback.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_rollback.py
new file mode 100644
index 0000000..cfcbb28
--- /dev/null
+++ b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_rollback.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2012-2014, Michael DeHaan
+# Copyright (c) 2017, Toshio Kuratomi
+# Copyright (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+import traceback
+import os
+from contextlib import contextmanager
+
+from ansible.config.manager import ensure_type
+from ansible.errors import (
+ AnsibleError,
+ AnsibleFileNotFound,
+ AnsibleAction,
+ AnsibleActionFail,
+)
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.module_utils.six import string_types, iteritems
+from ansible.module_utils._text import to_text, to_bytes, to_native
+from ansible.plugins.action import ActionBase
+
+
+class RemoveOmit(object):
+ def __init__(self, buffer, omit_value):
+ try:
+ import yaml
+ except ImportError:
+ raise AnsibleError("Failed to import the required Python library (PyYAML).")
+ self.data = yaml.safe_load_all(buffer)
+ self.omit = omit_value
+
+ def remove_omit(self, data):
+ if isinstance(data, dict):
+ result = dict()
+ for key, value in iteritems(data):
+ if value == self.omit:
+ continue
+ result[key] = self.remove_omit(value)
+ return result
+ if isinstance(data, list):
+ return [self.remove_omit(v) for v in data if v != self.omit]
+ return data
+
+ def output(self):
+ return [self.remove_omit(d) for d in self.data]
+
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = True
+ DEFAULT_NEWLINE_SEQUENCE = "\n"
+
+ def _ensure_invocation(self, result):
+ # NOTE: adding invocation arguments here needs to be kept in sync with
+ # any no_log specified in the argument_spec in the module.
+ if "invocation" not in result:
+ if self._play_context.no_log:
+ result["invocation"] = "CENSORED: no_log is set"
+ else:
+ result["invocation"] = self._task.args.copy()
+ result["invocation"]["module_args"] = self._task.args.copy()
+
+ return result
+
+ @contextmanager
+ def get_template_data(self, template_path):
+ try:
+ source = self._find_needle("templates", template_path)
+ except AnsibleError as e:
+ raise AnsibleActionFail(to_text(e))
+
+ # Get vault decrypted tmp file
+ try:
+ tmp_source = self._loader.get_real_file(source)
+ except AnsibleFileNotFound as e:
+ raise AnsibleActionFail(
+ "could not find template=%s, %s" % (source, to_text(e))
+ )
+ b_tmp_source = to_bytes(tmp_source, errors="surrogate_or_strict")
+
+ try:
+ with open(b_tmp_source, "rb") as f:
+ try:
+ template_data = to_text(f.read(), errors="surrogate_or_strict")
+ except UnicodeError:
+ raise AnsibleActionFail(
+ "Template source files must be utf-8 encoded"
+ )
+ yield template_data
+ except AnsibleAction:
+ raise
+ except Exception as e:
+ raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
+ finally:
+ self._loader.cleanup_tmp_file(b_tmp_source)
+
+ def get_template_args(self, template):
+ template_param = {
+ "newline_sequence": self.DEFAULT_NEWLINE_SEQUENCE,
+ "variable_start_string": None,
+ "variable_end_string": None,
+ "block_start_string": None,
+ "block_end_string": None,
+ "trim_blocks": True,
+ "lstrip_blocks": False,
+ }
+ if isinstance(template, string_types):
+ # treat this as raw_params
+ template_param["path"] = template
+ elif isinstance(template, dict):
+ template_args = template
+ template_path = template_args.get("path", None)
+ if not template_path:
+ raise AnsibleActionFail("Please specify path for template.")
+ template_param["path"] = template_path
+
+ # Validate the string-type template options
+ for s_type in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ ):
+ if s_type in template_args:
+ value = ensure_type(template_args[s_type], "string")
+ if value is not None and not isinstance(value, string_types):
+ raise AnsibleActionFail(
+ "%s is expected to be a string, but got %s instead"
+ % (s_type, type(value))
+ )
+ try:
+ template_param.update(
+ {
+ "trim_blocks": boolean(
+ template_args.get("trim_blocks", True), strict=False
+ ),
+ "lstrip_blocks": boolean(
+ template_args.get("lstrip_blocks", False), strict=False
+ ),
+ }
+ )
+ except TypeError as e:
+ raise AnsibleActionFail(to_native(e))
+
+ template_param.update(
+ {
+ "newline_sequence": template_args.get(
+ "newline_sequence", self.DEFAULT_NEWLINE_SEQUENCE
+ ),
+ "variable_start_string": template_args.get(
+ "variable_start_string", None
+ ),
+ "variable_end_string": template_args.get(
+ "variable_end_string", None
+ ),
+ "block_start_string": template_args.get("block_start_string", None),
+ "block_end_string": template_args.get("block_end_string", None),
+ }
+ )
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+ return template_param
+
+ def import_jinja2_lstrip(self, templates):
+ # Option `lstrip_blocks' was added in Jinja2 version 2.7.
+ if any(tmp["lstrip_blocks"] for tmp in templates):
+ try:
+ import jinja2.defaults
+ except ImportError:
+ raise AnsibleError(
+ "Unable to import Jinja2 defaults for determining Jinja2 features."
+ )
+
+ try:
+ jinja2.defaults.LSTRIP_BLOCKS
+ except AttributeError:
+ raise AnsibleError(
+ "Option `lstrip_blocks' is only available in Jinja2 versions >=2.7"
+ )
+
+ def load_template(self, template, new_module_args, task_vars):
+ # template is only supported by k8s module.
+ if self._task.action not in (
+ "k8s",
+ "kubernetes.core.k8s",
+ "community.okd.k8s",
+ "redhat.openshift.k8s",
+ "community.kubernetes.k8s",
+ "openshift_adm_groups_sync",
+ "community.okd.openshift_adm_groups_sync",
+ "redhat.openshift.openshift_adm_groups_sync",
+ ):
+ raise AnsibleActionFail(
+ "'template' is only a supported parameter for the 'k8s' module."
+ )
+
+ omit_value = task_vars.get("omit")
+ template_params = []
+ if isinstance(template, string_types) or isinstance(template, dict):
+ template_params.append(self.get_template_args(template))
+ elif isinstance(template, list):
+ for element in template:
+ template_params.append(self.get_template_args(element))
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+
+ self.import_jinja2_lstrip(template_params)
+
+ wrong_sequences = ["\\n", "\\r", "\\r\\n"]
+ allowed_sequences = ["\n", "\r", "\r\n"]
+
+ result_template = []
+ old_vars = self._templar.available_variables
+
+ default_environment = {}
+ for key in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ "trim_blocks",
+ "lstrip_blocks",
+ ):
+ if hasattr(self._templar.environment, key):
+ default_environment[key] = getattr(self._templar.environment, key)
+ for template_item in template_params:
+ # We need to convert unescaped sequences to proper escaped sequences for Jinja2
+ newline_sequence = template_item["newline_sequence"]
+ if newline_sequence in wrong_sequences:
+ template_item["newline_sequence"] = allowed_sequences[
+ wrong_sequences.index(newline_sequence)
+ ]
+ elif newline_sequence not in allowed_sequences:
+ raise AnsibleActionFail(
+ "newline_sequence needs to be one of: \n, \r or \r\n"
+ )
+
+ # template the source data locally & get ready to transfer
+ with self.get_template_data(template_item["path"]) as template_data:
+ # add ansible 'template' vars
+ temp_vars = copy.deepcopy(task_vars)
+ for key, value in iteritems(template_item):
+ if hasattr(self._templar.environment, key):
+ if value is not None:
+ setattr(self._templar.environment, key, value)
+ else:
+ setattr(
+ self._templar.environment,
+ key,
+ default_environment.get(key),
+ )
+ self._templar.available_variables = temp_vars
+ result = self._templar.do_template(
+ template_data,
+ preserve_trailing_newlines=True,
+ escape_backslashes=False,
+ )
+ if omit_value is not None:
+ result_template.extend(RemoveOmit(result, omit_value).output())
+ else:
+ result_template.append(result)
+ self._templar.available_variables = old_vars
+ resource_definition = self._task.args.get("definition", None)
+ if not resource_definition:
+ new_module_args.pop("template")
+ new_module_args["definition"] = result_template
+
+ def get_file_realpath(self, local_path):
+ # local_path is only supported by k8s_cp module.
+ if self._task.action not in (
+ "k8s_cp",
+ "kubernetes.core.k8s_cp",
+ "community.kubernetes.k8s_cp",
+ ):
+ raise AnsibleActionFail(
+                "'local_path' is only a supported parameter for the 'k8s_cp' module."
+ )
+
+ if os.path.exists(local_path):
+ return local_path
+
+ try:
+ # find in expected paths
+ return self._find_needle("files", local_path)
+ except AnsibleError:
+ raise AnsibleActionFail(
+ "%s does not exist in local filesystem" % local_path
+ )
+
+ def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
+ if isinstance(kubeconfig, string_types):
+ # find the kubeconfig in the expected search path
+ if not remote_transport:
+ # kubeconfig is local
+ # find in expected paths
+ kubeconfig = self._find_needle("files", kubeconfig)
+
+ # decrypt kubeconfig found
+ actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
+ new_module_args["kubeconfig"] = actual_file
+
+ elif isinstance(kubeconfig, dict):
+ new_module_args["kubeconfig"] = kubeconfig
+ else:
+ raise AnsibleActionFail(
+ "Error while reading kubeconfig parameter - "
+ "a string or dict expected, but got %s instead" % type(kubeconfig)
+ )
+
+ def run(self, tmp=None, task_vars=None):
+ """handler for k8s options"""
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+        # Check the current transport connection and, depending on it,
+        # look for kubeconfig and src:
+        # 'local' => look for files on the Ansible controller
+        # any other transport => look for files on the remote node
+ remote_transport = self._connection.transport != "local"
+
+ new_module_args = copy.deepcopy(self._task.args)
+
+ kubeconfig = self._task.args.get("kubeconfig", None)
+ if kubeconfig:
+ try:
+ self.get_kubeconfig(kubeconfig, remote_transport, new_module_args)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ # find the file in the expected search path
+ src = self._task.args.get("src", None)
+
+ if src:
+ if remote_transport:
+ # src is on remote node
+ result.update(
+ self._execute_module(
+ module_name=self._task.action, task_vars=task_vars
+ )
+ )
+ return self._ensure_invocation(result)
+
+ # src is local
+ try:
+ # find in expected paths
+ src = self._find_needle("files", src)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ if src:
+ new_module_args["src"] = src
+
+ template = self._task.args.get("template", None)
+ if template:
+ self.load_template(template, new_module_args, task_vars)
+
+ local_path = self._task.args.get("local_path")
+ state = self._task.args.get("state", None)
+ if local_path and state == "to_pod" and not remote_transport:
+ new_module_args["local_path"] = self.get_file_realpath(local_path)
+
+ # Execute the k8s_* module.
+ module_return = self._execute_module(
+ module_name=self._task.action,
+ module_args=new_module_args,
+ task_vars=task_vars,
+ )
+
+ # Delete tmp path
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+
+ result.update(module_return)
+
+ return self._ensure_invocation(result)
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_scale.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_scale.py
deleted file mode 120000
index 667980e..0000000
--- a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_scale.py
+++ /dev/null
@@ -1 +0,0 @@
-k8s_info.py
\ No newline at end of file
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_scale.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_scale.py
new file mode 100644
index 0000000..cfcbb28
--- /dev/null
+++ b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_scale.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2012-2014, Michael DeHaan
+# Copyright (c) 2017, Toshio Kuratomi
+# Copyright (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+import traceback
+import os
+from contextlib import contextmanager
+
+from ansible.config.manager import ensure_type
+from ansible.errors import (
+ AnsibleError,
+ AnsibleFileNotFound,
+ AnsibleAction,
+ AnsibleActionFail,
+)
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.module_utils.six import string_types, iteritems
+from ansible.module_utils._text import to_text, to_bytes, to_native
+from ansible.plugins.action import ActionBase
+
+
+class RemoveOmit(object):
+ def __init__(self, buffer, omit_value):
+ try:
+ import yaml
+ except ImportError:
+ raise AnsibleError("Failed to import the required Python library (PyYAML).")
+ self.data = yaml.safe_load_all(buffer)
+ self.omit = omit_value
+
+ def remove_omit(self, data):
+ if isinstance(data, dict):
+ result = dict()
+ for key, value in iteritems(data):
+ if value == self.omit:
+ continue
+ result[key] = self.remove_omit(value)
+ return result
+ if isinstance(data, list):
+ return [self.remove_omit(v) for v in data if v != self.omit]
+ return data
+
+ def output(self):
+ return [self.remove_omit(d) for d in self.data]
+
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = True
+ DEFAULT_NEWLINE_SEQUENCE = "\n"
+
+ def _ensure_invocation(self, result):
+ # NOTE: adding invocation arguments here needs to be kept in sync with
+ # any no_log specified in the argument_spec in the module.
+ if "invocation" not in result:
+ if self._play_context.no_log:
+ result["invocation"] = "CENSORED: no_log is set"
+ else:
+ result["invocation"] = self._task.args.copy()
+ result["invocation"]["module_args"] = self._task.args.copy()
+
+ return result
+
+ @contextmanager
+ def get_template_data(self, template_path):
+ try:
+ source = self._find_needle("templates", template_path)
+ except AnsibleError as e:
+ raise AnsibleActionFail(to_text(e))
+
+ # Get vault decrypted tmp file
+ try:
+ tmp_source = self._loader.get_real_file(source)
+ except AnsibleFileNotFound as e:
+ raise AnsibleActionFail(
+ "could not find template=%s, %s" % (source, to_text(e))
+ )
+ b_tmp_source = to_bytes(tmp_source, errors="surrogate_or_strict")
+
+ try:
+ with open(b_tmp_source, "rb") as f:
+ try:
+ template_data = to_text(f.read(), errors="surrogate_or_strict")
+ except UnicodeError:
+ raise AnsibleActionFail(
+ "Template source files must be utf-8 encoded"
+ )
+ yield template_data
+ except AnsibleAction:
+ raise
+ except Exception as e:
+ raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
+ finally:
+ self._loader.cleanup_tmp_file(b_tmp_source)
+
+ def get_template_args(self, template):
+ template_param = {
+ "newline_sequence": self.DEFAULT_NEWLINE_SEQUENCE,
+ "variable_start_string": None,
+ "variable_end_string": None,
+ "block_start_string": None,
+ "block_end_string": None,
+ "trim_blocks": True,
+ "lstrip_blocks": False,
+ }
+ if isinstance(template, string_types):
+ # treat this as raw_params
+ template_param["path"] = template
+ elif isinstance(template, dict):
+ template_args = template
+ template_path = template_args.get("path", None)
+ if not template_path:
+ raise AnsibleActionFail("Please specify path for template.")
+ template_param["path"] = template_path
+
+            # Validate the string-type options
+ for s_type in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ ):
+ if s_type in template_args:
+ value = ensure_type(template_args[s_type], "string")
+ if value is not None and not isinstance(value, string_types):
+ raise AnsibleActionFail(
+ "%s is expected to be a string, but got %s instead"
+ % (s_type, type(value))
+ )
+ try:
+ template_param.update(
+ {
+ "trim_blocks": boolean(
+ template_args.get("trim_blocks", True), strict=False
+ ),
+ "lstrip_blocks": boolean(
+ template_args.get("lstrip_blocks", False), strict=False
+ ),
+ }
+ )
+ except TypeError as e:
+ raise AnsibleActionFail(to_native(e))
+
+ template_param.update(
+ {
+ "newline_sequence": template_args.get(
+ "newline_sequence", self.DEFAULT_NEWLINE_SEQUENCE
+ ),
+ "variable_start_string": template_args.get(
+ "variable_start_string", None
+ ),
+ "variable_end_string": template_args.get(
+ "variable_end_string", None
+ ),
+ "block_start_string": template_args.get("block_start_string", None),
+ "block_end_string": template_args.get("block_end_string", None),
+ }
+ )
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+ return template_param
+
+ def import_jinja2_lstrip(self, templates):
+ # Option `lstrip_blocks' was added in Jinja2 version 2.7.
+ if any(tmp["lstrip_blocks"] for tmp in templates):
+ try:
+ import jinja2.defaults
+ except ImportError:
+ raise AnsibleError(
+ "Unable to import Jinja2 defaults for determining Jinja2 features."
+ )
+
+ try:
+ jinja2.defaults.LSTRIP_BLOCKS
+ except AttributeError:
+ raise AnsibleError(
+ "Option `lstrip_blocks' is only available in Jinja2 versions >=2.7"
+ )
+
+ def load_template(self, template, new_module_args, task_vars):
+ # template is only supported by k8s module.
+ if self._task.action not in (
+ "k8s",
+ "kubernetes.core.k8s",
+ "community.okd.k8s",
+ "redhat.openshift.k8s",
+ "community.kubernetes.k8s",
+ "openshift_adm_groups_sync",
+ "community.okd.openshift_adm_groups_sync",
+ "redhat.openshift.openshift_adm_groups_sync",
+ ):
+ raise AnsibleActionFail(
+ "'template' is only a supported parameter for the 'k8s' module."
+ )
+
+ omit_value = task_vars.get("omit")
+ template_params = []
+ if isinstance(template, string_types) or isinstance(template, dict):
+ template_params.append(self.get_template_args(template))
+ elif isinstance(template, list):
+ for element in template:
+ template_params.append(self.get_template_args(element))
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+
+ self.import_jinja2_lstrip(template_params)
+
+ wrong_sequences = ["\\n", "\\r", "\\r\\n"]
+ allowed_sequences = ["\n", "\r", "\r\n"]
+
+ result_template = []
+ old_vars = self._templar.available_variables
+
+ default_environment = {}
+ for key in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ "trim_blocks",
+ "lstrip_blocks",
+ ):
+ if hasattr(self._templar.environment, key):
+ default_environment[key] = getattr(self._templar.environment, key)
+ for template_item in template_params:
+ # We need to convert unescaped sequences to proper escaped sequences for Jinja2
+ newline_sequence = template_item["newline_sequence"]
+ if newline_sequence in wrong_sequences:
+ template_item["newline_sequence"] = allowed_sequences[
+ wrong_sequences.index(newline_sequence)
+ ]
+ elif newline_sequence not in allowed_sequences:
+ raise AnsibleActionFail(
+ "newline_sequence needs to be one of: \n, \r or \r\n"
+ )
+
+ # template the source data locally & get ready to transfer
+ with self.get_template_data(template_item["path"]) as template_data:
+ # add ansible 'template' vars
+ temp_vars = copy.deepcopy(task_vars)
+ for key, value in iteritems(template_item):
+ if hasattr(self._templar.environment, key):
+ if value is not None:
+ setattr(self._templar.environment, key, value)
+ else:
+ setattr(
+ self._templar.environment,
+ key,
+ default_environment.get(key),
+ )
+ self._templar.available_variables = temp_vars
+ result = self._templar.do_template(
+ template_data,
+ preserve_trailing_newlines=True,
+ escape_backslashes=False,
+ )
+ if omit_value is not None:
+ result_template.extend(RemoveOmit(result, omit_value).output())
+ else:
+ result_template.append(result)
+ self._templar.available_variables = old_vars
+ resource_definition = self._task.args.get("definition", None)
+ if not resource_definition:
+ new_module_args.pop("template")
+ new_module_args["definition"] = result_template
+
+ def get_file_realpath(self, local_path):
+ # local_path is only supported by k8s_cp module.
+ if self._task.action not in (
+ "k8s_cp",
+ "kubernetes.core.k8s_cp",
+ "community.kubernetes.k8s_cp",
+ ):
+ raise AnsibleActionFail(
+                "'local_path' is only a supported parameter for the 'k8s_cp' module."
+ )
+
+ if os.path.exists(local_path):
+ return local_path
+
+ try:
+ # find in expected paths
+ return self._find_needle("files", local_path)
+ except AnsibleError:
+ raise AnsibleActionFail(
+ "%s does not exist in local filesystem" % local_path
+ )
+
+ def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
+ if isinstance(kubeconfig, string_types):
+ # find the kubeconfig in the expected search path
+ if not remote_transport:
+ # kubeconfig is local
+ # find in expected paths
+ kubeconfig = self._find_needle("files", kubeconfig)
+
+ # decrypt kubeconfig found
+ actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
+ new_module_args["kubeconfig"] = actual_file
+
+ elif isinstance(kubeconfig, dict):
+ new_module_args["kubeconfig"] = kubeconfig
+ else:
+ raise AnsibleActionFail(
+ "Error while reading kubeconfig parameter - "
+ "a string or dict expected, but got %s instead" % type(kubeconfig)
+ )
+
+ def run(self, tmp=None, task_vars=None):
+ """handler for k8s options"""
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+        # Check the current transport connection and, depending on it,
+        # look for kubeconfig and src:
+        # 'local' => look for files on the Ansible controller
+        # any other transport => look for files on the remote node
+ remote_transport = self._connection.transport != "local"
+
+ new_module_args = copy.deepcopy(self._task.args)
+
+ kubeconfig = self._task.args.get("kubeconfig", None)
+ if kubeconfig:
+ try:
+ self.get_kubeconfig(kubeconfig, remote_transport, new_module_args)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ # find the file in the expected search path
+ src = self._task.args.get("src", None)
+
+ if src:
+ if remote_transport:
+ # src is on remote node
+ result.update(
+ self._execute_module(
+ module_name=self._task.action, task_vars=task_vars
+ )
+ )
+ return self._ensure_invocation(result)
+
+ # src is local
+ try:
+ # find in expected paths
+ src = self._find_needle("files", src)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ if src:
+ new_module_args["src"] = src
+
+ template = self._task.args.get("template", None)
+ if template:
+ self.load_template(template, new_module_args, task_vars)
+
+ local_path = self._task.args.get("local_path")
+ state = self._task.args.get("state", None)
+ if local_path and state == "to_pod" and not remote_transport:
+ new_module_args["local_path"] = self.get_file_realpath(local_path)
+
+ # Execute the k8s_* module.
+ module_return = self._execute_module(
+ module_name=self._task.action,
+ module_args=new_module_args,
+ task_vars=task_vars,
+ )
+
+ # Delete tmp path
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+
+ result.update(module_return)
+
+ return self._ensure_invocation(result)
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_service.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_service.py
deleted file mode 120000
index 667980e..0000000
--- a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_service.py
+++ /dev/null
@@ -1 +0,0 @@
-k8s_info.py
\ No newline at end of file
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/k8s_service.py b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_service.py
new file mode 100644
index 0000000..cfcbb28
--- /dev/null
+++ b/collections/ansible_collections/kubernetes/core/plugins/action/k8s_service.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2012-2014, Michael DeHaan
+# Copyright (c) 2017, Toshio Kuratomi
+# Copyright (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+import traceback
+import os
+from contextlib import contextmanager
+
+from ansible.config.manager import ensure_type
+from ansible.errors import (
+ AnsibleError,
+ AnsibleFileNotFound,
+ AnsibleAction,
+ AnsibleActionFail,
+)
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.module_utils.six import string_types, iteritems
+from ansible.module_utils._text import to_text, to_bytes, to_native
+from ansible.plugins.action import ActionBase
+
+
+class RemoveOmit(object):
+ def __init__(self, buffer, omit_value):
+ try:
+ import yaml
+ except ImportError:
+ raise AnsibleError("Failed to import the required Python library (PyYAML).")
+ self.data = yaml.safe_load_all(buffer)
+ self.omit = omit_value
+
+ def remove_omit(self, data):
+ if isinstance(data, dict):
+ result = dict()
+ for key, value in iteritems(data):
+ if value == self.omit:
+ continue
+ result[key] = self.remove_omit(value)
+ return result
+ if isinstance(data, list):
+ return [self.remove_omit(v) for v in data if v != self.omit]
+ return data
+
+ def output(self):
+ return [self.remove_omit(d) for d in self.data]
+
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = True
+ DEFAULT_NEWLINE_SEQUENCE = "\n"
+
+ def _ensure_invocation(self, result):
+ # NOTE: adding invocation arguments here needs to be kept in sync with
+ # any no_log specified in the argument_spec in the module.
+ if "invocation" not in result:
+ if self._play_context.no_log:
+ result["invocation"] = "CENSORED: no_log is set"
+ else:
+ result["invocation"] = self._task.args.copy()
+ result["invocation"]["module_args"] = self._task.args.copy()
+
+ return result
+
+ @contextmanager
+ def get_template_data(self, template_path):
+ try:
+ source = self._find_needle("templates", template_path)
+ except AnsibleError as e:
+ raise AnsibleActionFail(to_text(e))
+
+ # Get vault decrypted tmp file
+ try:
+ tmp_source = self._loader.get_real_file(source)
+ except AnsibleFileNotFound as e:
+ raise AnsibleActionFail(
+ "could not find template=%s, %s" % (source, to_text(e))
+ )
+ b_tmp_source = to_bytes(tmp_source, errors="surrogate_or_strict")
+
+ try:
+ with open(b_tmp_source, "rb") as f:
+ try:
+ template_data = to_text(f.read(), errors="surrogate_or_strict")
+ except UnicodeError:
+ raise AnsibleActionFail(
+ "Template source files must be utf-8 encoded"
+ )
+ yield template_data
+ except AnsibleAction:
+ raise
+ except Exception as e:
+ raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
+ finally:
+ self._loader.cleanup_tmp_file(b_tmp_source)
+
+ def get_template_args(self, template):
+ template_param = {
+ "newline_sequence": self.DEFAULT_NEWLINE_SEQUENCE,
+ "variable_start_string": None,
+ "variable_end_string": None,
+ "block_start_string": None,
+ "block_end_string": None,
+ "trim_blocks": True,
+ "lstrip_blocks": False,
+ }
+ if isinstance(template, string_types):
+ # treat this as raw_params
+ template_param["path"] = template
+ elif isinstance(template, dict):
+ template_args = template
+ template_path = template_args.get("path", None)
+ if not template_path:
+ raise AnsibleActionFail("Please specify path for template.")
+ template_param["path"] = template_path
+
+            # Validate the string-type options
+ for s_type in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ ):
+ if s_type in template_args:
+ value = ensure_type(template_args[s_type], "string")
+ if value is not None and not isinstance(value, string_types):
+ raise AnsibleActionFail(
+ "%s is expected to be a string, but got %s instead"
+ % (s_type, type(value))
+ )
+ try:
+ template_param.update(
+ {
+ "trim_blocks": boolean(
+ template_args.get("trim_blocks", True), strict=False
+ ),
+ "lstrip_blocks": boolean(
+ template_args.get("lstrip_blocks", False), strict=False
+ ),
+ }
+ )
+ except TypeError as e:
+ raise AnsibleActionFail(to_native(e))
+
+ template_param.update(
+ {
+ "newline_sequence": template_args.get(
+ "newline_sequence", self.DEFAULT_NEWLINE_SEQUENCE
+ ),
+ "variable_start_string": template_args.get(
+ "variable_start_string", None
+ ),
+ "variable_end_string": template_args.get(
+ "variable_end_string", None
+ ),
+ "block_start_string": template_args.get("block_start_string", None),
+ "block_end_string": template_args.get("block_end_string", None),
+ }
+ )
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+ return template_param
+
+ def import_jinja2_lstrip(self, templates):
+ # Option `lstrip_blocks' was added in Jinja2 version 2.7.
+ if any(tmp["lstrip_blocks"] for tmp in templates):
+ try:
+ import jinja2.defaults
+ except ImportError:
+ raise AnsibleError(
+ "Unable to import Jinja2 defaults for determining Jinja2 features."
+ )
+
+ try:
+ jinja2.defaults.LSTRIP_BLOCKS
+ except AttributeError:
+ raise AnsibleError(
+ "Option `lstrip_blocks' is only available in Jinja2 versions >=2.7"
+ )
+
+ def load_template(self, template, new_module_args, task_vars):
+ # template is only supported by k8s module.
+ if self._task.action not in (
+ "k8s",
+ "kubernetes.core.k8s",
+ "community.okd.k8s",
+ "redhat.openshift.k8s",
+ "community.kubernetes.k8s",
+ "openshift_adm_groups_sync",
+ "community.okd.openshift_adm_groups_sync",
+ "redhat.openshift.openshift_adm_groups_sync",
+ ):
+ raise AnsibleActionFail(
+ "'template' is only a supported parameter for the 'k8s' module."
+ )
+
+ omit_value = task_vars.get("omit")
+ template_params = []
+ if isinstance(template, string_types) or isinstance(template, dict):
+ template_params.append(self.get_template_args(template))
+ elif isinstance(template, list):
+ for element in template:
+ template_params.append(self.get_template_args(element))
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+
+ self.import_jinja2_lstrip(template_params)
+
+ wrong_sequences = ["\\n", "\\r", "\\r\\n"]
+ allowed_sequences = ["\n", "\r", "\r\n"]
+
+ result_template = []
+ old_vars = self._templar.available_variables
+
+ default_environment = {}
+ for key in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ "trim_blocks",
+ "lstrip_blocks",
+ ):
+ if hasattr(self._templar.environment, key):
+ default_environment[key] = getattr(self._templar.environment, key)
+ for template_item in template_params:
+ # We need to convert unescaped sequences to proper escaped sequences for Jinja2
+ newline_sequence = template_item["newline_sequence"]
+ if newline_sequence in wrong_sequences:
+ template_item["newline_sequence"] = allowed_sequences[
+ wrong_sequences.index(newline_sequence)
+ ]
+ elif newline_sequence not in allowed_sequences:
+ raise AnsibleActionFail(
+ "newline_sequence needs to be one of: \n, \r or \r\n"
+ )
+
+ # template the source data locally & get ready to transfer
+ with self.get_template_data(template_item["path"]) as template_data:
+ # add ansible 'template' vars
+ temp_vars = copy.deepcopy(task_vars)
+ for key, value in iteritems(template_item):
+ if hasattr(self._templar.environment, key):
+ if value is not None:
+ setattr(self._templar.environment, key, value)
+ else:
+ setattr(
+ self._templar.environment,
+ key,
+ default_environment.get(key),
+ )
+ self._templar.available_variables = temp_vars
+ result = self._templar.do_template(
+ template_data,
+ preserve_trailing_newlines=True,
+ escape_backslashes=False,
+ )
+ if omit_value is not None:
+ result_template.extend(RemoveOmit(result, omit_value).output())
+ else:
+ result_template.append(result)
+ self._templar.available_variables = old_vars
+ resource_definition = self._task.args.get("definition", None)
+ if not resource_definition:
+ new_module_args.pop("template")
+ new_module_args["definition"] = result_template
+
+ def get_file_realpath(self, local_path):
+ # local_path is only supported by k8s_cp module.
+ if self._task.action not in (
+ "k8s_cp",
+ "kubernetes.core.k8s_cp",
+ "community.kubernetes.k8s_cp",
+ ):
+ raise AnsibleActionFail(
+                "'local_path' is only a supported parameter for the 'k8s_cp' module."
+ )
+
+ if os.path.exists(local_path):
+ return local_path
+
+ try:
+ # find in expected paths
+ return self._find_needle("files", local_path)
+ except AnsibleError:
+ raise AnsibleActionFail(
+ "%s does not exist in local filesystem" % local_path
+ )
+
+ def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
+ if isinstance(kubeconfig, string_types):
+ # find the kubeconfig in the expected search path
+ if not remote_transport:
+ # kubeconfig is local
+ # find in expected paths
+ kubeconfig = self._find_needle("files", kubeconfig)
+
+ # decrypt kubeconfig found
+ actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
+ new_module_args["kubeconfig"] = actual_file
+
+ elif isinstance(kubeconfig, dict):
+ new_module_args["kubeconfig"] = kubeconfig
+ else:
+ raise AnsibleActionFail(
+ "Error while reading kubeconfig parameter - "
+ "a string or dict expected, but got %s instead" % type(kubeconfig)
+ )
+
+ def run(self, tmp=None, task_vars=None):
+ """handler for k8s options"""
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+        # Check the current transport connection and, depending on it,
+        # look for kubeconfig and src:
+        # 'local' => look for files on the Ansible controller
+        # any other transport => look for files on the remote node
+ remote_transport = self._connection.transport != "local"
+
+ new_module_args = copy.deepcopy(self._task.args)
+
+ kubeconfig = self._task.args.get("kubeconfig", None)
+ if kubeconfig:
+ try:
+ self.get_kubeconfig(kubeconfig, remote_transport, new_module_args)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ # find the file in the expected search path
+ src = self._task.args.get("src", None)
+
+ if src:
+ if remote_transport:
+ # src is on remote node
+ result.update(
+ self._execute_module(
+ module_name=self._task.action, task_vars=task_vars
+ )
+ )
+ return self._ensure_invocation(result)
+
+ # src is local
+ try:
+ # find in expected paths
+ src = self._find_needle("files", src)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ if src:
+ new_module_args["src"] = src
+
+ template = self._task.args.get("template", None)
+ if template:
+ self.load_template(template, new_module_args, task_vars)
+
+ local_path = self._task.args.get("local_path")
+ state = self._task.args.get("state", None)
+ if local_path and state == "to_pod" and not remote_transport:
+ new_module_args["local_path"] = self.get_file_realpath(local_path)
+
+ # Execute the k8s_* module.
+ module_return = self._execute_module(
+ module_name=self._task.action,
+ module_args=new_module_args,
+ task_vars=task_vars,
+ )
+
+ # Delete tmp path
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+
+ result.update(module_return)
+
+ return self._ensure_invocation(result)
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/ks8_json_patch.py b/collections/ansible_collections/kubernetes/core/plugins/action/ks8_json_patch.py
deleted file mode 120000
index 667980e..0000000
--- a/collections/ansible_collections/kubernetes/core/plugins/action/ks8_json_patch.py
+++ /dev/null
@@ -1 +0,0 @@
-k8s_info.py
\ No newline at end of file
diff --git a/collections/ansible_collections/kubernetes/core/plugins/action/ks8_json_patch.py b/collections/ansible_collections/kubernetes/core/plugins/action/ks8_json_patch.py
new file mode 100644
index 0000000..cfcbb28
--- /dev/null
+++ b/collections/ansible_collections/kubernetes/core/plugins/action/ks8_json_patch.py
@@ -0,0 +1,398 @@
+# Copyright (c) 2012-2014, Michael DeHaan
+# Copyright (c) 2017, Toshio Kuratomi
+# Copyright (c) 2020, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import absolute_import, division, print_function
+
+__metaclass__ = type
+
+import copy
+import traceback
+import os
+from contextlib import contextmanager
+
+from ansible.config.manager import ensure_type
+from ansible.errors import (
+ AnsibleError,
+ AnsibleFileNotFound,
+ AnsibleAction,
+ AnsibleActionFail,
+)
+from ansible.module_utils.parsing.convert_bool import boolean
+from ansible.module_utils.six import string_types, iteritems
+from ansible.module_utils._text import to_text, to_bytes, to_native
+from ansible.plugins.action import ActionBase
+
+
+class RemoveOmit(object):
+ def __init__(self, buffer, omit_value):
+ try:
+ import yaml
+ except ImportError:
+ raise AnsibleError("Failed to import the required Python library (PyYAML).")
+ self.data = yaml.safe_load_all(buffer)
+ self.omit = omit_value
+
+ def remove_omit(self, data):
+ if isinstance(data, dict):
+ result = dict()
+ for key, value in iteritems(data):
+ if value == self.omit:
+ continue
+ result[key] = self.remove_omit(value)
+ return result
+ if isinstance(data, list):
+ return [self.remove_omit(v) for v in data if v != self.omit]
+ return data
+
+ def output(self):
+ return [self.remove_omit(d) for d in self.data]
+
+
+class ActionModule(ActionBase):
+
+ TRANSFERS_FILES = True
+ DEFAULT_NEWLINE_SEQUENCE = "\n"
+
+ def _ensure_invocation(self, result):
+ # NOTE: adding invocation arguments here needs to be kept in sync with
+ # any no_log specified in the argument_spec in the module.
+ if "invocation" not in result:
+ if self._play_context.no_log:
+ result["invocation"] = "CENSORED: no_log is set"
+ else:
+ result["invocation"] = self._task.args.copy()
+ result["invocation"]["module_args"] = self._task.args.copy()
+
+ return result
+
+ @contextmanager
+ def get_template_data(self, template_path):
+ try:
+ source = self._find_needle("templates", template_path)
+ except AnsibleError as e:
+ raise AnsibleActionFail(to_text(e))
+
+ # Get vault decrypted tmp file
+ try:
+ tmp_source = self._loader.get_real_file(source)
+ except AnsibleFileNotFound as e:
+ raise AnsibleActionFail(
+ "could not find template=%s, %s" % (source, to_text(e))
+ )
+ b_tmp_source = to_bytes(tmp_source, errors="surrogate_or_strict")
+
+ try:
+ with open(b_tmp_source, "rb") as f:
+ try:
+ template_data = to_text(f.read(), errors="surrogate_or_strict")
+ except UnicodeError:
+ raise AnsibleActionFail(
+ "Template source files must be utf-8 encoded"
+ )
+ yield template_data
+ except AnsibleAction:
+ raise
+ except Exception as e:
+ raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
+ finally:
+ self._loader.cleanup_tmp_file(b_tmp_source)
+
+ def get_template_args(self, template):
+ template_param = {
+ "newline_sequence": self.DEFAULT_NEWLINE_SEQUENCE,
+ "variable_start_string": None,
+ "variable_end_string": None,
+ "block_start_string": None,
+ "block_end_string": None,
+ "trim_blocks": True,
+ "lstrip_blocks": False,
+ }
+ if isinstance(template, string_types):
+ # treat this as raw_params
+ template_param["path"] = template
+ elif isinstance(template, dict):
+ template_args = template
+ template_path = template_args.get("path", None)
+ if not template_path:
+ raise AnsibleActionFail("Please specify path for template.")
+ template_param["path"] = template_path
+
+            # Validate the string-type options
+ for s_type in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ ):
+ if s_type in template_args:
+ value = ensure_type(template_args[s_type], "string")
+ if value is not None and not isinstance(value, string_types):
+ raise AnsibleActionFail(
+ "%s is expected to be a string, but got %s instead"
+ % (s_type, type(value))
+ )
+ try:
+ template_param.update(
+ {
+ "trim_blocks": boolean(
+ template_args.get("trim_blocks", True), strict=False
+ ),
+ "lstrip_blocks": boolean(
+ template_args.get("lstrip_blocks", False), strict=False
+ ),
+ }
+ )
+ except TypeError as e:
+ raise AnsibleActionFail(to_native(e))
+
+ template_param.update(
+ {
+ "newline_sequence": template_args.get(
+ "newline_sequence", self.DEFAULT_NEWLINE_SEQUENCE
+ ),
+ "variable_start_string": template_args.get(
+ "variable_start_string", None
+ ),
+ "variable_end_string": template_args.get(
+ "variable_end_string", None
+ ),
+ "block_start_string": template_args.get("block_start_string", None),
+ "block_end_string": template_args.get("block_end_string", None),
+ }
+ )
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+ return template_param
+
+ def import_jinja2_lstrip(self, templates):
+ # Option `lstrip_blocks' was added in Jinja2 version 2.7.
+ if any(tmp["lstrip_blocks"] for tmp in templates):
+ try:
+ import jinja2.defaults
+ except ImportError:
+ raise AnsibleError(
+ "Unable to import Jinja2 defaults for determining Jinja2 features."
+ )
+
+ try:
+ jinja2.defaults.LSTRIP_BLOCKS
+ except AttributeError:
+ raise AnsibleError(
+ "Option `lstrip_blocks' is only available in Jinja2 versions >=2.7"
+ )
+
+ def load_template(self, template, new_module_args, task_vars):
+ # template is only supported by k8s module.
+ if self._task.action not in (
+ "k8s",
+ "kubernetes.core.k8s",
+ "community.okd.k8s",
+ "redhat.openshift.k8s",
+ "community.kubernetes.k8s",
+ "openshift_adm_groups_sync",
+ "community.okd.openshift_adm_groups_sync",
+ "redhat.openshift.openshift_adm_groups_sync",
+ ):
+ raise AnsibleActionFail(
+ "'template' is only a supported parameter for the 'k8s' module."
+ )
+
+ omit_value = task_vars.get("omit")
+ template_params = []
+ if isinstance(template, string_types) or isinstance(template, dict):
+ template_params.append(self.get_template_args(template))
+ elif isinstance(template, list):
+ for element in template:
+ template_params.append(self.get_template_args(element))
+ else:
+ raise AnsibleActionFail(
+ "Error while reading template file - "
+ "a string or dict for template expected, but got %s instead"
+ % type(template)
+ )
+
+ self.import_jinja2_lstrip(template_params)
+
+ wrong_sequences = ["\\n", "\\r", "\\r\\n"]
+ allowed_sequences = ["\n", "\r", "\r\n"]
+
+ result_template = []
+ old_vars = self._templar.available_variables
+
+ default_environment = {}
+ for key in (
+ "newline_sequence",
+ "variable_start_string",
+ "variable_end_string",
+ "block_start_string",
+ "block_end_string",
+ "trim_blocks",
+ "lstrip_blocks",
+ ):
+ if hasattr(self._templar.environment, key):
+ default_environment[key] = getattr(self._templar.environment, key)
+ for template_item in template_params:
+ # We need to convert unescaped sequences to proper escaped sequences for Jinja2
+ newline_sequence = template_item["newline_sequence"]
+ if newline_sequence in wrong_sequences:
+ template_item["newline_sequence"] = allowed_sequences[
+ wrong_sequences.index(newline_sequence)
+ ]
+ elif newline_sequence not in allowed_sequences:
+ raise AnsibleActionFail(
+ "newline_sequence needs to be one of: \n, \r or \r\n"
+ )
+
+ # template the source data locally & get ready to transfer
+ with self.get_template_data(template_item["path"]) as template_data:
+ # add ansible 'template' vars
+ temp_vars = copy.deepcopy(task_vars)
+ for key, value in iteritems(template_item):
+ if hasattr(self._templar.environment, key):
+ if value is not None:
+ setattr(self._templar.environment, key, value)
+ else:
+ setattr(
+ self._templar.environment,
+ key,
+ default_environment.get(key),
+ )
+ self._templar.available_variables = temp_vars
+ result = self._templar.do_template(
+ template_data,
+ preserve_trailing_newlines=True,
+ escape_backslashes=False,
+ )
+ if omit_value is not None:
+ result_template.extend(RemoveOmit(result, omit_value).output())
+ else:
+ result_template.append(result)
+ self._templar.available_variables = old_vars
+ resource_definition = self._task.args.get("definition", None)
+ if not resource_definition:
+ new_module_args.pop("template")
+ new_module_args["definition"] = result_template
+
+ def get_file_realpath(self, local_path):
+ # local_path is only supported by k8s_cp module.
+ if self._task.action not in (
+ "k8s_cp",
+ "kubernetes.core.k8s_cp",
+ "community.kubernetes.k8s_cp",
+ ):
+ raise AnsibleActionFail(
+                "'local_path' is only a supported parameter for the 'k8s_cp' module."
+ )
+
+ if os.path.exists(local_path):
+ return local_path
+
+ try:
+ # find in expected paths
+ return self._find_needle("files", local_path)
+ except AnsibleError:
+ raise AnsibleActionFail(
+ "%s does not exist in local filesystem" % local_path
+ )
+
+ def get_kubeconfig(self, kubeconfig, remote_transport, new_module_args):
+ if isinstance(kubeconfig, string_types):
+ # find the kubeconfig in the expected search path
+ if not remote_transport:
+ # kubeconfig is local
+ # find in expected paths
+ kubeconfig = self._find_needle("files", kubeconfig)
+
+ # decrypt kubeconfig found
+ actual_file = self._loader.get_real_file(kubeconfig, decrypt=True)
+ new_module_args["kubeconfig"] = actual_file
+
+ elif isinstance(kubeconfig, dict):
+ new_module_args["kubeconfig"] = kubeconfig
+ else:
+ raise AnsibleActionFail(
+ "Error while reading kubeconfig parameter - "
+ "a string or dict expected, but got %s instead" % type(kubeconfig)
+ )
+
+ def run(self, tmp=None, task_vars=None):
+ """handler for k8s options"""
+ if task_vars is None:
+ task_vars = dict()
+
+ result = super(ActionModule, self).run(tmp, task_vars)
+ del tmp # tmp no longer has any effect
+
+        # Check the current transport connection and, depending on it,
+        # look for kubeconfig and src:
+        # 'local' => look for files on the Ansible controller
+        # any other transport => look for files on the remote node
+ remote_transport = self._connection.transport != "local"
+
+ new_module_args = copy.deepcopy(self._task.args)
+
+ kubeconfig = self._task.args.get("kubeconfig", None)
+ if kubeconfig:
+ try:
+ self.get_kubeconfig(kubeconfig, remote_transport, new_module_args)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ # find the file in the expected search path
+ src = self._task.args.get("src", None)
+
+ if src:
+ if remote_transport:
+ # src is on remote node
+ result.update(
+ self._execute_module(
+ module_name=self._task.action, task_vars=task_vars
+ )
+ )
+ return self._ensure_invocation(result)
+
+ # src is local
+ try:
+ # find in expected paths
+ src = self._find_needle("files", src)
+ except AnsibleError as e:
+ result["failed"] = True
+ result["msg"] = to_text(e)
+ result["exception"] = traceback.format_exc()
+ return result
+
+ if src:
+ new_module_args["src"] = src
+
+ template = self._task.args.get("template", None)
+ if template:
+ self.load_template(template, new_module_args, task_vars)
+
+ local_path = self._task.args.get("local_path")
+ state = self._task.args.get("state", None)
+ if local_path and state == "to_pod" and not remote_transport:
+ new_module_args["local_path"] = self.get_file_realpath(local_path)
+
+ # Execute the k8s_* module.
+ module_return = self._execute_module(
+ module_name=self._task.action,
+ module_args=new_module_args,
+ task_vars=task_vars,
+ )
+
+ # Delete tmp path
+ self._remove_tmp_path(self._connection._shell.tmpdir)
+
+ result.update(module_return)
+
+ return self._ensure_invocation(result)
diff --git a/inventory b/inventory
index e5db835..b6ba6b7 100755
--- a/inventory
+++ b/inventory
@@ -1,11 +1,11 @@
[master]
-10.10.43.227
+10.10.43.206
[worker1]
-10.10.43.228
+10.10.43.207
[worker2]
-10.10.43.229
+10.10.43.208
[cluster:children]
master
diff --git a/roles/cmoa_install/defaults/main.yml b/roles/cmoa_install/defaults/main.yml
index 2be26ae..700df0c 100644
--- a/roles/cmoa_install/defaults/main.yml
+++ b/roles/cmoa_install/defaults/main.yml
@@ -4,7 +4,7 @@ helm_version: v3.10.3
# cmoa info
cmoa_namespace: imxc
-cmoa_version: rel3.5.3
+cmoa_version: rel3.5.4
# files/00-default in role
docker_secret_file: secret_nexus.yaml
@@ -66,3 +66,6 @@ keycloak_clients:
# default ip/version (not change)
before_ip: 111.111.111.111
before_version: rel0.0.0
+
+# dockerhub or nexus
+REPO: dockerhub
diff --git a/roles/cmoa_install/files/00-default/sa_patch.sh b/roles/cmoa_install/files/00-default/sa_patch.sh
index 618a35b..4cbc3c3 100755
--- a/roles/cmoa_install/files/00-default/sa_patch.sh
+++ b/roles/cmoa_install/files/00-default/sa_patch.sh
@@ -4,5 +4,5 @@ export KUBECONFIG=$1
kubectl wait node --for=condition=ready --all --timeout=60s
-#kubectl -n imxc patch sa default -p '{"imagePullSecrets": [{"name": "regcred"}]}'
+kubectl -n imxc patch sa default -p '{"imagePullSecrets": [{"name": "regcred"}]}'
kubectl -n default patch sa default -p '{"imagePullSecrets": [{"name": "regcred"}]}'
diff --git a/roles/cmoa_install/files/02-base/base/charts/analysis/templates/imxc-metric-analyzer-master.yaml b/roles/cmoa_install/files/02-base/base/charts/analysis/templates/imxc-metric-analyzer-master.yaml
index 21a9298..fee94f4 100644
--- a/roles/cmoa_install/files/02-base/base/charts/analysis/templates/imxc-metric-analyzer-master.yaml
+++ b/roles/cmoa_install/files/02-base/base/charts/analysis/templates/imxc-metric-analyzer-master.yaml
@@ -40,6 +40,8 @@ spec:
labels:
app: metric-analyzer-master
spec:
+ imagePullSecrets:
+ - name: {{ .Values.global.imagepullsecrets }}
containers:
- image: {{ .Values.global.IMXC_IN_REGISTRY }}/metric_analyzer:{{ .Values.global.METRIC_ANALYZER_MASTER_VERSION }}
imagePullPolicy: IfNotPresent
diff --git a/roles/cmoa_install/files/02-base/base/charts/analysis/templates/imxc-metric-analyzer-worker.yaml b/roles/cmoa_install/files/02-base/base/charts/analysis/templates/imxc-metric-analyzer-worker.yaml
index 7e6eaea..76eea2b 100644
--- a/roles/cmoa_install/files/02-base/base/charts/analysis/templates/imxc-metric-analyzer-worker.yaml
+++ b/roles/cmoa_install/files/02-base/base/charts/analysis/templates/imxc-metric-analyzer-worker.yaml
@@ -15,6 +15,8 @@ spec:
labels:
app: metric-analyzer-worker
spec:
+ imagePullSecrets:
+ - name: {{ .Values.global.imagepullsecrets }}
containers:
- image: {{ .Values.global.IMXC_IN_REGISTRY }}/metric_analyzer_worker:{{ .Values.global.METRIC_ANALYZER_WORKER_VERSION }}
imagePullPolicy: IfNotPresent
diff --git a/roles/cmoa_install/files/02-base/base/charts/cortex/templates/alertmanager/alertmanager-dep.yaml b/roles/cmoa_install/files/02-base/base/charts/cortex/templates/alertmanager/alertmanager-dep.yaml
index 49c4ca7..031170d 100644
--- a/roles/cmoa_install/files/02-base/base/charts/cortex/templates/alertmanager/alertmanager-dep.yaml
+++ b/roles/cmoa_install/files/02-base/base/charts/cortex/templates/alertmanager/alertmanager-dep.yaml
@@ -14,6 +14,8 @@ spec:
labels:
name: alertmanager
spec:
+ imagePullSecrets:
+ - name: {{ .Values.global.imagepullsecrets }}
containers:
- name: alertmanager
# image: quay.io/cortexproject/cortex:v1.9.0
diff --git a/roles/cmoa_install/files/02-base/base/charts/cortex/values.yaml b/roles/cmoa_install/files/02-base/base/charts/cortex/values.yaml
index 1695d69..1f5f409 100644
--- a/roles/cmoa_install/files/02-base/base/charts/cortex/values.yaml
+++ b/roles/cmoa_install/files/02-base/base/charts/cortex/values.yaml
@@ -1,6 +1,6 @@
image:
#repository: quay.io/cortexproject/cortex
- repository: 0.0.0.0/cmoa3/cortex
+ repository: 0.0.0.0/cortex
# -- Allows you to override the cortex version in this chart. Use at your own risk.
#tag: ""
tag: v1.11.0
@@ -334,7 +334,7 @@ alertmanager:
# -- Sidecars that collect the configmaps with specified label and stores the included files them into the respective folders
sidecar:
image:
- repository: 0.0.0.0/cmoa3/k8s-sidecar
+ repository: 0.0.0.0/k8s-sidecar
tag: 1.10.7
sha: ""
imagePullPolicy: IfNotPresent
@@ -701,7 +701,7 @@ ruler:
# -- Sidecars that collect the configmaps with specified label and stores the included files them into the respective folders
sidecar:
image:
- repository: 0.0.0.0/cmoa3/k8s-sidecar
+ repository: 0.0.0.0/k8s-sidecar
tag: 1.10.7
sha: ""
imagePullPolicy: IfNotPresent
@@ -1126,7 +1126,7 @@ nginx:
basicAuthSecretName: ""
image:
- repository: 0.0.0.0/cmoa3/nginx
+ repository: 0.0.0.0/nginx
tag: 1.21
pullPolicy: IfNotPresent
diff --git a/roles/cmoa_install/files/02-base/base/charts/elasticsearch/templates/6.statefulset.yaml b/roles/cmoa_install/files/02-base/base/charts/elasticsearch/templates/6.statefulset.yaml
index 250258e..8cca4a3 100644
--- a/roles/cmoa_install/files/02-base/base/charts/elasticsearch/templates/6.statefulset.yaml
+++ b/roles/cmoa_install/files/02-base/base/charts/elasticsearch/templates/6.statefulset.yaml
@@ -55,6 +55,8 @@ spec:
volumeMounts:
- name: elasticsearch-data
mountPath: /usr/share/elasticsearch/data
+ imagePullSecrets:
+ - name: {{ .Values.global.imagepullsecrets }}
containers:
- name: elasticsearch
resources:
diff --git a/roles/cmoa_install/files/02-base/base/charts/kafka-manager/templates/1.kafka-manager.yaml b/roles/cmoa_install/files/02-base/base/charts/kafka-manager/templates/1.kafka-manager.yaml
index 4edcf32..0e42d09 100644
--- a/roles/cmoa_install/files/02-base/base/charts/kafka-manager/templates/1.kafka-manager.yaml
+++ b/roles/cmoa_install/files/02-base/base/charts/kafka-manager/templates/1.kafka-manager.yaml
@@ -13,6 +13,8 @@ spec:
labels:
app: kafka-manager
spec:
+ imagePullSecrets:
+ - name: {{ .Values.global.imagepullsecrets }}
containers:
- name: kafka-manager
image: {{ .Values.global.IMXC_IN_REGISTRY }}/kafka-manager:{{ .Values.global.KAFKA_MANAGER_VERSION }}
diff --git a/roles/cmoa_install/files/02-base/base/charts/kafka-ui/values.yaml b/roles/cmoa_install/files/02-base/base/charts/kafka-ui/values.yaml
index 733d037..e8d2186 100644
--- a/roles/cmoa_install/files/02-base/base/charts/kafka-ui/values.yaml
+++ b/roles/cmoa_install/files/02-base/base/charts/kafka-ui/values.yaml
@@ -1,13 +1,14 @@
replicaCount: 1
image:
- registry: docker.io
- repository: provectuslabs/kafka-ui
+ registry: 0.0.0.0
+ repository: kafka-ui
pullPolicy: IfNotPresent
# Overrides the image tag whose default is the chart appVersion.
tag: ""
-imagePullSecrets: []
+imagePullSecrets:
+ - name: regcred
nameOverride: ""
fullnameOverride: ""
diff --git a/roles/cmoa_install/files/02-base/base/charts/kafka/templates/5.kafka.yaml b/roles/cmoa_install/files/02-base/base/charts/kafka/templates/5.kafka.yaml
index 7b0c154..6b5df93 100644
--- a/roles/cmoa_install/files/02-base/base/charts/kafka/templates/5.kafka.yaml
+++ b/roles/cmoa_install/files/02-base/base/charts/kafka/templates/5.kafka.yaml
@@ -40,6 +40,8 @@ spec:
- zookeeper
topologyKey: "kubernetes.io/hostname"
terminationGracePeriodSeconds: 30
+ imagePullSecrets:
+ - name: {{ .Values.global.imagepullsecrets }}
initContainers:
- name: init-config
image: {{ .Values.global.IMXC_IN_REGISTRY }}/kafka-initutils:{{ .Values.global.KAFKA_INITUTILS_VERSION }}
diff --git a/roles/cmoa_install/files/02-base/base/charts/postgres/templates/4.postgres-deployment.yaml b/roles/cmoa_install/files/02-base/base/charts/postgres/templates/4.postgres-deployment.yaml
index 81192aa..f635a95 100644
--- a/roles/cmoa_install/files/02-base/base/charts/postgres/templates/4.postgres-deployment.yaml
+++ b/roles/cmoa_install/files/02-base/base/charts/postgres/templates/4.postgres-deployment.yaml
@@ -19,6 +19,8 @@ spec:
labels:
app: postgres
spec:
+ imagePullSecrets:
+ - name: {{ .Values.global.imagepullsecrets }}
containers:
- name: postgres
image: {{ .Values.global.IMXC_IN_REGISTRY }}/postgres:{{ .Values.global.POSTGRES_VERSION }}
diff --git a/roles/cmoa_install/files/02-base/base/charts/rabbitmq/values.yaml b/roles/cmoa_install/files/02-base/base/charts/rabbitmq/values.yaml
index 27705b1..c037e04 100644
--- a/roles/cmoa_install/files/02-base/base/charts/rabbitmq/values.yaml
+++ b/roles/cmoa_install/files/02-base/base/charts/rabbitmq/values.yaml
@@ -18,7 +18,7 @@
## @param image.debug Set to true if you would like to see extra information on logs
##
image:
- registry: 0.0.0.0/cmoa3
+ registry: 0.0.0.0
repository: rabbitmq
tag: v1.0.0 # {{ .Values.global.RABBITMQ_VERSION }}
@@ -1113,7 +1113,7 @@ volumePermissions:
## @param volumePermissions.image.pullSecrets Specify docker-registry secret names as an array
##
image:
- registry: 0.0.0.0/cmoa3 # docker.io
+ registry: 0.0.0.0 # docker.io
repository: bitnami-shell # bitnami/bitnami-shell
tag: 10-debian-10-r175
## Specify a imagePullPolicy
diff --git a/roles/cmoa_install/files/02-base/base/charts/redis/values.yaml b/roles/cmoa_install/files/02-base/base/charts/redis/values.yaml
index 98a42c7..8127cf1 100644
--- a/roles/cmoa_install/files/02-base/base/charts/redis/values.yaml
+++ b/roles/cmoa_install/files/02-base/base/charts/redis/values.yaml
@@ -13,7 +13,7 @@ global:
## ref: https://hub.docker.com/r/bitnami/redis/tags/
##
image:
- registry: 0.0.0.0/cmoa3
+ registry: 0.0.0.0
repository: redis
## Bitnami Redis(TM) image tag
## ref: https://github.com/bitnami/bitnami-docker-redis#supported-tags-and-respective-dockerfile-links
@@ -28,8 +28,8 @@ image:
## Secrets must be manually created in the namespace.
## ref: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/
##
- # pullSecrets:
- # - myRegistryKeySecretName
+ pullSecrets:
+ - regcred
## String to partially override redis.fullname template (will maintain the release name)
##
@@ -75,8 +75,8 @@ sentinel:
## Secrets must be manually created in the namespace.
## ref: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/
##
- # pullSecrets:
- # - myRegistryKeySecretName
+ pullSecrets:
+ - regcred
masterSet: mymaster
initialCheckTimeout: 5
quorum: 2
@@ -735,8 +735,8 @@ metrics:
## Secrets must be manually created in the namespace.
## ref: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/
##
- # pullSecrets:
- # - myRegistryKeySecretName
+ pullSecrets:
+ - regcred
## Metrics exporter resource requests and limits
## ref: http://kubernetes.io/docs/user-guide/compute-resources/
@@ -863,8 +863,8 @@ volumePermissions:
## Secrets must be manually created in the namespace.
## ref: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/
##
- # pullSecrets:
- # - myRegistryKeySecretName
+ pullSecrets:
+ - regcred
resources: {}
# resources:
# requests:
@@ -906,8 +906,8 @@ sysctlImage:
## Secrets must be manually created in the namespace.
## ref: https://kubernetes.io/docs/tasks/configure-pod-container/pull-image-private-registry/
##
- # pullSecrets:
- # - myRegistryKeySecretName
+ pullSecrets:
+ - regcred
mountHostSys: false
resources: {}
# resources:
diff --git a/roles/cmoa_install/files/02-base/base/charts/zookeeper/templates/4.statefulset.yaml b/roles/cmoa_install/files/02-base/base/charts/zookeeper/templates/4.statefulset.yaml
index c14e047..b2d2d98 100644
--- a/roles/cmoa_install/files/02-base/base/charts/zookeeper/templates/4.statefulset.yaml
+++ b/roles/cmoa_install/files/02-base/base/charts/zookeeper/templates/4.statefulset.yaml
@@ -20,6 +20,8 @@ spec:
storage: persistent
annotations:
spec:
+ imagePullSecrets:
+ - name: {{ .Values.global.imagepullsecrets }}
terminationGracePeriodSeconds: 10
initContainers:
- name: init-config
diff --git a/roles/cmoa_install/files/02-base/base/values.yaml b/roles/cmoa_install/files/02-base/base/values.yaml
index efee8d9..bf8040c 100644
--- a/roles/cmoa_install/files/02-base/base/values.yaml
+++ b/roles/cmoa_install/files/02-base/base/values.yaml
@@ -71,3 +71,5 @@ global:
image:
registry: 10.10.43.230:5000/cmoa3 # {{ .Values.global.IMXC_REGISTRY }}
tag: v1.0.0 # {{ .Values.global.RABBITMQ_VERSION }}
+
+ imagepullsecrets: regcred
diff --git a/roles/cmoa_install/files/03-ddl-dml/postgres/patch/postgres_patch_3.5.4.psql b/roles/cmoa_install/files/03-ddl-dml/postgres/patch/postgres_patch_3.5.4.psql
new file mode 100644
index 0000000..b080944
--- /dev/null
+++ b/roles/cmoa_install/files/03-ddl-dml/postgres/patch/postgres_patch_3.5.4.psql
@@ -0,0 +1,68 @@
+-- Add a table to support DeploymentConfig
+CREATE TABLE cmoa_deploymentconfig_base(
+ kube_flatting_time bigint,
+ cluster_id varchar(255),
+ kind varchar(30),
+ metadata_uid varchar(40),
+ row_index int,
+ kind_status varchar(50),
+ metadata_creationTimestamp varchar(25),
+ metadata_name text,
+ metadata_namespace text,
+ metadata_resourceVersion text,
+ spec_replicas text,
+ spec_template_spec_containers_image text,
+ spec_template_metadata_labels text,
+ status_availableReplicas text,
+ status_updatedReplicas text,
+ status_replicas text,
+ status_unavailableReplicas text,
+ create_time timestamp default now(),
+ PRIMARY KEY (kube_flatting_time, cluster_id, kind, metadata_uid, row_index)
+);
+
+-- Add a table to support ReplicationController
+CREATE TABLE cmoa_replicationcontroller_base (
+ kube_flatting_time bigint,
+ cluster_id varchar(255),
+ kind varchar(30),
+ metadata_uid varchar(40),
+ row_index int,
+ kind_status varchar(50),
+ metadata_annotations text,
+ metadata_creationtimestamp varchar(25),
+ metadata_labels text,
+ metadata_name text,
+ metadata_namespace text,
+ metadata_resourceversion text,
+ spec_replicas text,
+ status_availablereplicas text,
+ status_readyreplicas text,
+ status_replicas text,
+ status_observedgeneration text,
+ create_time timestamp default now(),
+ PRIMARY KEY (kube_flatting_time, cluster_id, kind, metadata_uid, row_index)
+);
+
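+-- Audit log table: records each user action (user, action type, menu path, result, and target)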
+create table public.audit_log
+(
+ id bigserial
+ constraint audit_log_pk
+ primary key,
+ created_date timestamp not null,
+ user_id varchar not null,
+ type varchar not null,
+ menu_path varchar not null,
+ result boolean not null,
+ target jsonb
+);
+
+
+
+-- Add metric meta entries for DeploymentConfig events
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_deploymentconfig','Kubernetes DeploymentConfig Event','Kubernetes DeploymentConfig Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="DeploymentConfig", {filter} })','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
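+-- Host-level metric meta entries (host count, host alive, host dead)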
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('host_count','Host Count','Host Count','count(node_boot_time_seconds{is_host="true"})','Host','System','','true','false','Host Count : {{humanize $value}}');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('host_alive','Host Alive','Host Alive','count(node_boot_time_seconds{is_host="true", {filter}})','Host','System','','true','false','Host Alive : {{$labels.instance}}');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('host_dead','Host Dead','Host Dead','absent(node_boot_time_seconds{is_host="true", {filter}})','Host','System','','true','false','Host Dead : {{$labels.instance}}');
+
+commit;
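This patch file is not wired into any task in this diff, so it presumably has to be applied by hand; the trailing commit; without an explicit BEGIN is effectively a no-op under psql's default autocommit. A hedged example of applying it from the install host (the postgres pod label, database name, and user below are assumptions and may differ per site):

    - name: Apply postgres patch 3.5.4 (manual example, not part of the role)
      ansible.builtin.shell: >
        kubectl -n {{ cmoa_namespace }} exec -i
        $(kubectl -n {{ cmoa_namespace }} get pod -l app=postgres -o jsonpath='{.items[0].metadata.name}')
        -- psql -U postgres -d postgres
        < {{ role_path }}/files/03-ddl-dml/postgres/patch/postgres_patch_3.5.4.psql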
diff --git a/roles/cmoa_install/files/03-ddl-dml/postgres/postgres_insert_ddl.psql b/roles/cmoa_install/files/03-ddl-dml/postgres/postgres_insert_ddl.psql
index 349f9f3..1453be5 100644
--- a/roles/cmoa_install/files/03-ddl-dml/postgres/postgres_insert_ddl.psql
+++ b/roles/cmoa_install/files/03-ddl-dml/postgres/postgres_insert_ddl.psql
@@ -1085,6 +1085,28 @@ CREATE TABLE cmoa_deployment_base(
PRIMARY KEY (kube_flatting_time, cluster_id, kind, metadata_uid, row_index)
);
----------------------------
+CREATE TABLE cmoa_deploymentconfig_base(
+ kube_flatting_time bigint,
+ cluster_id varchar(255),
+ kind varchar(30),
+ metadata_uid varchar(40),
+ row_index int,
+ kind_status varchar(50),
+ metadata_creationTimestamp varchar(25),
+ metadata_name text,
+ metadata_namespace text,
+ metadata_resourceVersion text,
+ spec_replicas text,
+ spec_template_spec_containers_image text,
+ spec_template_metadata_labels text,
+ status_availableReplicas text,
+ status_updatedReplicas text,
+ status_replicas text,
+ status_unavailableReplicas text,
+ create_time timestamp default now(),
+ PRIMARY KEY (kube_flatting_time, cluster_id, kind, metadata_uid, row_index)
+);
+----------------------------
CREATE TABLE cmoa_endpoint_addresses(
kube_flatting_time bigint,
kind varchar(30),
@@ -1558,6 +1580,28 @@ CREATE TABLE cmoa_replicaset_base (
PRIMARY KEY (kube_flatting_time, cluster_id, kind, metadata_uid, row_index)
);
----------------------------
+CREATE TABLE cmoa_replicationcontroller_base (
+ kube_flatting_time bigint,
+ cluster_id varchar(255),
+ kind varchar(30),
+ metadata_uid varchar(40),
+ row_index int,
+ kind_status varchar(50),
+ metadata_annotations text,
+ metadata_creationtimestamp varchar(25),
+ metadata_labels text,
+ metadata_name text,
+ metadata_namespace text,
+ metadata_resourceversion text,
+ spec_replicas text,
+ status_availablereplicas text,
+ status_readyreplicas text,
+ status_replicas text,
+ status_observedgeneration text,
+ create_time timestamp default now(),
+ PRIMARY KEY (kube_flatting_time, cluster_id, kind, metadata_uid, row_index)
+);
+----------------------------
CREATE TABLE cmoa_resourcequota_base (
kube_flatting_time bigint,
cluster_id varchar(255),
@@ -1734,3 +1778,17 @@ CREATE TABLE public.tenant_info_auth_resources (
ALTER TABLE public.tenant_info_auth_resources ADD CONSTRAINT fkkecsc13ydhwg8u05aumkqbnx1 FOREIGN KEY (tenant_info_id) REFERENCES public.tenant_info(id);
ALTER TABLE public.tenant_info_auth_resources ADD CONSTRAINT fkpvvec4ju3hsma6s1rtgvr4mf6 FOREIGN KEY (auth_resources_id) REFERENCES public.auth_resource3(id);
+
+create table public.audit_log
+(
+ id bigserial
+ constraint audit_log_pk
+ primary key,
+ created_date timestamp not null,
+ user_id varchar not null,
+ type varchar not null,
+ menu_path varchar not null,
+ result boolean not null,
+ target jsonb
+);
+
diff --git a/roles/cmoa_install/files/03-ddl-dml/postgres/postgres_insert_dml.psql b/roles/cmoa_install/files/03-ddl-dml/postgres/postgres_insert_dml.psql
index e0f3f30..9582c36 100644
--- a/roles/cmoa_install/files/03-ddl-dml/postgres/postgres_insert_dml.psql
+++ b/roles/cmoa_install/files/03-ddl-dml/postgres/postgres_insert_dml.psql
@@ -1291,6 +1291,10 @@ INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,ent
INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('host_process_write_bytes','Host Process write bytes(KiB)','number of bytes written by this group/1024','sum by (groupname) (rate(namedprocess_namegroup_write_bytes_total{{filter}}[1m]))/1024','Process','Host','','true','false','"Host:{{$labels.instance}} Disk Write Size:{{humanize $value}}KiB|{threshold}KiB."');
INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('host_process_count','Host Process number','number of processes in this group', 'sum by (groupname) (namedprocess_namegroup_num_procs{{filter}})', 'Process','Host','','true','false','"Host:{{$labels.instance}} Count:{{humanize $value}}|{threshold}."');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('host_count','Host Count','Host Count','count(node_boot_time_seconds{is_host="true"})','Host','System','','true','false','Host Count : {{humanize $value}}');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('host_alive','Host Alive','Host Alive','count(node_boot_time_seconds{is_host="true", {filter}})','Host','System','','true','false','Host Alive : {{$labels.instance}}');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('host_dead','Host Dead','Host Dead','absent(node_boot_time_seconds{is_host="true", {filter}})','Host','System','','true','false','Host Dead : {{$labels.instance}}');
+
INSERT INTO public.license_key (id, license_key, set_time, in_used, tenant_id) VALUES (nextval('hibernate_sequence'), 'A46CB0A0870B60DD0EF554F092FB8490C647C4ACCF17177EB0028FEF1B677A1DC86C08219D3D357E55E87B653A9D2F044F9095576ED493CE5D1E180E8843A04BCFE94E500F85491D408CFC7397B82F00063415F4CF8756545B6ED1A38F07F91A7B6D9381B7FC433A5086CDD2D748527ECB42835677199F23F7C8E33A66E8138182DDD76BE4925FA4B1DFD96FD5578FE80C75E0E20D76877BF6FD570265D8E69CAC34795B982CF8D811669894886567E4F5F62E28990953401374B548787E35374BFF201D5C9AD062B326E72F9B1D7791A610DA1BDF1D4F829819BC537E06C8D54F95FB04F2DAC456698F605DE3BBD72E472FC79658C806B188988B053E1E4D96FFFFFF0312983D630FAD5E9160650653074248047030124045265319328119048121312221292096178141356403289033057286071001044254168244430392446457353385472238471183338511051434316333006127241420429465082200161165099271484261287306170426201314452131350327249112310323036187433166345114324280269098441154231174135226128298344425341164290424093450115453299282209144110060155055496368233391148510223372355438125122460232315097083390283180026090507303464176016343147301028053052418046214169100404193398101492126437150008449359062078276386196105011194373118107003376243188284337378334352432479501211364186021040035210237120336302073022394079272002081397132067383497202300181309396185361017436058208454167203412219275329234043427354024133409339470296204490485256467335056F5B2CABD122B376DAEA67944E1CCE6867DF9EB6504C78F817DF9EB6504C78F81BF1E615E6EC6242C9667BD675FC5FA39C6672FE2068E5D1431C6CD04429D07655865E293C1F77ED7A0D33F5556DA6CD3A8EC2774DB04F797CE4A29B0312F75E585D51D7B4DD227EA6BD5278CB9233040E7DD2B30A6D5119959D5B7EAC826D3DA0537EFB5A034A6A1C91A619F4E168F46A455B594C91F058E1E22C7EA2957EED7533D069C335C95B4FA2B53E71A800343EA7F16B05AFBA04635F1FBDE9C81709C27BA075C78FA26311ED3A4A5226EF47FC84C3024999406B47F2098B5983CC3CAF79F92332074B9872E429CBE8EF12D5092628E4D4A39CBDDFCAAB2E382229CF09A5B10243340C1A7A0C5CBC14C704FCE873571524A5B038F1781CD31A4D8E2C48E02E63A2746E668273BE9D63937B88D8C864CE439528EB13BDFAC3E52EE4B8CB75B4ED65A7C97B42E5DAEE3E41D2331B06FFFBA71BECD9B96AEEB969670FC3869CC59050FD6DFA32457195314104022250232266247291151DEFAULT_TENANT', now(), true, 'DEFAULT_TENANT');
insert into public.license_key2 (id, license_key, set_time, cluster_id, license_used) values (nextval('hibernate_sequence'), 'D041F44269EAFF1AF7C37ACAA86B7D9CBED89547431E777B797220CF62FE5D6A27C66BEBEAB8F4C89EA5379009C90CDEBFFAE307B7AEB897DC4D8CEAB61654340BB746B0B46679A9FB4791C777BAEBA176308F6BEB1654CE43D4E80E6D0F80CEC00B1EC30E7DA4BB8D3159133EF98AEB50617107DB77BE94676E0D4AA04ADA3B11A66824DB89A60C52BC1AB92926F10189DBBA6210B31478F48CF87B5D754F1A7C6BED0D1637742179DBF7BE82B3B3357AEA82CFAAD9126E39C4E19BABCB1CBDDB816C86A8F7C476D963265720383B627800775B0C9116D67CE5CB7CFC71D0A8A36623965EBB18A5BE1816FB1FAAAEAC361D2ABBC7344EC0B6C61E0395115B13FFFFFF03DEF34E840F2ED2AC84AC44DF368362366124308470063002498494067338303241077065122260378200508377102354337080160182150254091118451110391059070094162363290186239455351194330333503046082379128006166220287276298120398066372099177432015458270176242025196335311342039022343475412085392206244005184417460227292375103433217376511140361223163316121467443014486278407389237024349111268136424371062035285300509195050441367478101310353464249250399393211468032382017479033204215420319027225173414447170427346074048078201158299332476339297492269181214328291096331271222221199421106169418137405411466364104047152090465446480302462385088114481261428257207129020358100073347153355274495263056109229159157348228275180360410147142130230179450079472482323145202198010119F9BFDDF3C203A7E537AB046811BB7CEA37AB046811BB7CEA37AB046811BB7CEAE012403885A8163C0E3E14D7AD6207B5E8CE91579501D84B09D6682339A4DB462F479FFE1B232AFB3D19E925768AF0AA3E62D9AB6F9CEADDB1CDCA351CAA90996631814A556C47270431A6A40891F756FDDCA7BDD05C62A2932F8E77979E0D43C9F12565B1F4BB4F0520B44CC76BAC23F65330AC5966D22B209F32126132F4848E500A013F4DC32306A9620394D40C94B8EBC2406B68EBE31DAB17EF2DF977731A5C41C11311DC36E1FB8BC2529D1AA20D5D46919472212D781B1D77378872CBD14C2A5B783C7ADF0D2680946C52E56E186A7E971E7EAB2CF09511361DD892B5D4A113E8A2C60E3F7FEFA4100753D82B7064101002937733CE0285C73130635F0CBBDF6F1160C2917B2DF9B1C391A8E9D7D9F380BF31A77A84017D0DF26B35BED6B2D145A051EB4345DA90241CA997828B8393ACD5C7316594634356CCC3986EFDD7776AC62C65E500ED125097142489479219130046503035CloudMOA', now(), null, true);
@@ -2455,17 +2459,18 @@ INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,ent
INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('nginx_http_requests_total','Nginx Http Requests','Http request count of Nginx','sum by (instance) (irate(nginx_http_requests_total{ {filter} }[1m]))','Request','Nginx','','true','false','"Http requests : {{$value}}|{threshold}"');
-- Kube Event 관련 metric meta 추가
-INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_node','Kubernetes Node Event','Kubernetes Node Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="Node"})','Event','Node','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
-INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_job','Kubernetes Job Event','Kubernetes Job Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="Job"})','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
-INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_cronjob','Kubernetes CronJob Event','Kubernetes CronJob Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="CronJob"})','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
-INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_daemonset','Kubernetes DaemonSet Event','Kubernetes DaemonSet Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="DaemonSet"})','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
-INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_deployment','Kubernetes Deployment Event','Kubernetes Deployment Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="Deployment"})','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
-INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_endpoints','Kubernetes Endpoints Event','Kubernetes Endpoints Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="Endpoints"})','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
-INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_persistentvolumeclaim','Kubernetes PersistentVolumeClaim Event','Kubernetes PersistentVolumeClaim Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="PersistentVolumeClaim"})','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
-INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_pod','Kubernetes Pod Event','Kubernetes Pod Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="Pod"})','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
-INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_warning','Kubernetes Warning Event','Kubernetes Warning Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{type="Warning"})','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Name: {{$involved_name}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
-INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_replicaset','Kubernetes ReplicaSet Event','Kubernetes ReplicaSet Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="ReplicaSet"})','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
-INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_statefulset','Kubernetes StatefulSet Event','Kubernetes StatefulSet Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="StatefulSet"})','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_node','Kubernetes Node Event','Kubernetes Node Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="Node", {filter} })','Event','Node','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_job','Kubernetes Job Event','Kubernetes Job Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="Job", {filter} })','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_cronjob','Kubernetes CronJob Event','Kubernetes CronJob Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="CronJob", {filter} })','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_daemonset','Kubernetes DaemonSet Event','Kubernetes DaemonSet Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="DaemonSet", {filter} })','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_deployment','Kubernetes Deployment Event','Kubernetes Deployment Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="Deployment", {filter} })','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_deploymentconfig','Kubernetes DeploymentConfig Event','Kubernetes DeploymentConfig Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="DeploymentConfig", {filter} })','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_endpoints','Kubernetes Endpoints Event','Kubernetes Endpoints Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="Endpoints", {filter} })','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_persistentvolumeclaim','Kubernetes PersistentVolumeClaim Event','Kubernetes PersistentVolumeClaim Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="PersistentVolumeClaim", {filter} })','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_pod','Kubernetes Pod Event','Kubernetes Pod Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="Pod", {filter} })','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_warning','Kubernetes Warning Event','Kubernetes Warning Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{type="Warning", {filter} })','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Name: {{$labels.involved_name}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_replicaset','Kubernetes ReplicaSet Event','Kubernetes ReplicaSet Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="ReplicaSet", {filter} })','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
+INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('kube_event_statefulset','Kubernetes StatefulSet Event','Kubernetes StatefulSet Event','count by (xm_clst_id,xm_namespace,entity_type,involved_kind,involved_name,reason,type) (imxc_kubernetes_event_in_last_min{involved_kind="StatefulSet", {filter} })','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} ');
-- Event Alert 관련 metric meta 추가
INSERT INTO public.metric_meta2 (id,meta_name,description,expr,resource_type,entity_type,groupby_keys,in_use,anomaly_score,message) VALUES('event_alert_successful_create','Event Alert SuccessfulCreate','Event Alert SuccessfulCreate','count by (xm_clst_id, xm_namespace, entity_type, involved_kind, involved_name, reason, type) (imxc_kubernetes_event_in_last_min{reason=~"SuccessfulCreate", {filter} })','Event','Workload','','true','false','CLST:{{$labels.xm_clst_id}} NS:{{$labels.xm_namespace}} Reason : {{$labels.reason}} Level : {{$labels.type}} Name : {{$labels.involved_name}} ');
diff --git a/roles/cmoa_install/files/05-imxc/templates/noti-server.yaml b/roles/cmoa_install/files/05-imxc/templates/noti-server.yaml
index 99c7a5b..93b019e 100755
--- a/roles/cmoa_install/files/05-imxc/templates/noti-server.yaml
+++ b/roles/cmoa_install/files/05-imxc/templates/noti-server.yaml
@@ -98,9 +98,21 @@ spec:
value: {{ .Values.global.IMXC_NAMESPACE }}
- name: IMXC_ALERT_KUBERNETES_NAMESPACE
value: {{ .Values.global.IMXC_NAMESPACE }}
+
# log4j
- name: LOG4J_FORMAT_MSG_NO_LOOKUPS
value: "true"
+
+ # service-now
+ - name: IMXC_ALERT_NOTIFICATION_SERVICE-NOW_ENABLED
+ value: "false"
+ - name: IMXC_ALERT_NOTIFICATION_SERVICE-NOW_KEY
+ value: ""
+ - name: IMXC_ALERT_NOTIFICATION_SERVICE-NOW_AUTH_ID
+ value: ""
+ - name: IMXC_ALERT_NOTIFICATION_SERVICE-NOW_AUTH_PASSWORD
+ value: ""
+
resources:
requests:
memory: "100Mi"
diff --git a/roles/cmoa_install/tasks/00-default-settings-master.yml b/roles/cmoa_install/tasks/00-default-settings-master.yml
index 4a17c4a..7f2cd20 100644
--- a/roles/cmoa_install/tasks/00-default-settings-master.yml
+++ b/roles/cmoa_install/tasks/00-default-settings-master.yml
@@ -10,7 +10,7 @@
kubernetes.core.k8s:
state: present
namespace: "{{ item }}"
- src: "{{ role_path }}/files/00-default/secret_nexus.yaml"
+ src: "{{ role_path }}/files/00-default/secret_{{ REPO }}.yaml"
apply: yes
with_items:
- "{{ cmoa_namespace }}"
diff --git a/roles/cmoa_install/tasks/01-storage-install.yml b/roles/cmoa_install/tasks/01-storage-install.yml
index 7c34969..19f3b67 100644
--- a/roles/cmoa_install/tasks/01-storage-install.yml
+++ b/roles/cmoa_install/tasks/01-storage-install.yml
@@ -20,11 +20,9 @@
- "{{ role_path }}/files/01-storage/{{item}}/values.yaml"
values:
image:
- repository: "{{ REGISTRY }}/cmoa3/minio"
- imagePullSecrets:
- - name: "{{ PullSecret }}"
+ repository: "{{ REGISTRY }}/minio"
mcImage:
- repository: "{{ REGISTRY }}/cmoa3/mc"
+ repository: "{{ REGISTRY }}/mc"
with_items:
- minio
diff --git a/roles/cmoa_install/tasks/02-base-install.yml b/roles/cmoa_install/tasks/02-base-install.yml
index 79b7ca1..3cecc43 100644
--- a/roles/cmoa_install/tasks/02-base-install.yml
+++ b/roles/cmoa_install/tasks/02-base-install.yml
@@ -18,11 +18,12 @@
- 01-coredns.yaml
- name: 3. Change Registry Address
- command: "sed -i 's/0.0.0.0/{{ REGISTRY }}/g' {{ role_path }}/files/02-base/base/charts/{{ item }}/values.yaml"
+ command: "sed -i 's#0.0.0.0#{{ REGISTRY }}#g' {{ role_path }}/files/02-base/base/charts/{{ item }}/values.yaml"
with_items:
- cortex
- rabbitmq
- redis
+ - kafka-ui
- name: 4. helmchart install (base)
kubernetes.core.helm:
@@ -36,10 +37,10 @@
- "{{ role_path }}/files/02-base/{{item}}/values.yaml"
values:
global:
- IMXC_IN_REGISTRY: "{{ REGISTRY }}/cmoa3"
+ IMXC_IN_REGISTRY: "{{ REGISTRY }}"
rabbitmq:
image:
- registry: "{{ REGISTRY }}/cmoa3"
+ registry: "{{ REGISTRY }}"
tag: v1.0.0
with_items:
- base
@@ -82,8 +83,9 @@
apply: yes
- name: 8. Change Registry Address - origin
- command: "sed -i 's/{{ REGISTRY }}/0.0.0.0/g' {{ role_path }}/files/02-base/base/charts/{{ item }}/values.yaml"
+ command: "sed -i 's#{{ REGISTRY }}#0.0.0.0#g' {{ role_path }}/files/02-base/base/charts/{{ item }}/values.yaml"
with_items:
- cortex
- rabbitmq
- redis
+ - kafka-ui
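The switch from / to # as the sed delimiter is needed because REGISTRY now contains a slash (e.g. 10.10.31.243:5000/cmoa3), which would otherwise terminate the s/// expression early. A delimiter-free alternative using the replace module instead of command+sed (illustrative only, equivalent in effect):

    - name: Change Registry Address (module-based alternative, illustrative)
      ansible.builtin.replace:
        path: "{{ role_path }}/files/02-base/base/charts/{{ item }}/values.yaml"
        regexp: '0\.0\.0\.0'
        replace: "{{ REGISTRY }}"
      with_items:
        - cortex
        - rabbitmq
        - redis
        - kafka-ui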
diff --git a/roles/cmoa_install/tasks/04-keycloak-install.yml b/roles/cmoa_install/tasks/04-keycloak-install.yml
index 2d12338..96d6010 100644
--- a/roles/cmoa_install/tasks/04-keycloak-install.yml
+++ b/roles/cmoa_install/tasks/04-keycloak-install.yml
@@ -11,12 +11,10 @@
- "{{ role_path }}/files/04-keycloak/values.yaml"
values:
image:
- repository: "{{ REGISTRY }}/cmoa3/keycloak"
- imagePullSecrets:
- - name: "{{ PullSecret }}"
+ repository: "{{ REGISTRY }}/keycloak"
extraInitContainers: |
- name: theme-provider
- image: "{{ REGISTRY }}/cmoa3/theme-provider:latest"
+ image: "{{ REGISTRY }}/theme-provider:latest"
imagePullPolicy: IfNotPresent
command:
- sh
@@ -30,7 +28,7 @@
mountPath: /theme
pgchecker:
image:
- repository: "{{ REGISTRY }}/cmoa3/busybox"
+ repository: "{{ REGISTRY }}/busybox"
tag: 1.32
pullPolicy: Always
with_items:
diff --git a/roles/cmoa_install/tasks/05-imxc-install.yml b/roles/cmoa_install/tasks/05-imxc-install.yml
index fd8f835..dc1b102 100644
--- a/roles/cmoa_install/tasks/05-imxc-install.yml
+++ b/roles/cmoa_install/tasks/05-imxc-install.yml
@@ -11,9 +11,9 @@
- "{{ role_path }}/files/05-imxc/values.yaml"
values:
image:
- repository: "{{ REGISTRY }}/cmoa3/nginx"
+ repository: "{{ REGISTRY }}/nginx"
global:
- IMXC_IN_REGISTRY: "{{ REGISTRY }}/cmoa3"
+ IMXC_IN_REGISTRY: "{{ REGISTRY }}"
with_items:
- imxc
diff --git a/roles/cmoa_install/tasks/06-imxc-ui-install.yml b/roles/cmoa_install/tasks/06-imxc-ui-install.yml
index 52c1cdf..ab115ce 100644
--- a/roles/cmoa_install/tasks/06-imxc-ui-install.yml
+++ b/roles/cmoa_install/tasks/06-imxc-ui-install.yml
@@ -11,9 +11,9 @@
- "{{ role_path }}/files/06-imxc-ui/{{ item }}/values.yaml"
values:
image:
- repository: "{{ REGISTRY }}/cmoa3/nginx"
+ repository: "{{ REGISTRY }}/nginx"
global:
- IMXC_IN_REGISTRY: "{{ REGISTRY }}/cmoa3"
+ IMXC_IN_REGISTRY: "{{ REGISTRY }}"
with_items:
- imxc-ui
ignore_errors: true
diff --git a/roles/cmoa_os_setting/defaults/main.yml b/roles/cmoa_os_setting/defaults/main.yml
index 89d4134..54a134d 100644
--- a/roles/cmoa_os_setting/defaults/main.yml
+++ b/roles/cmoa_os_setting/defaults/main.yml
@@ -15,6 +15,12 @@ kubernetes_calico_manifest_file: "{{ role_path }}/files/calico.yaml"
kubernetes_metric_server_file: "{{ role_path }}/files/components.yaml"
+
+crio:
+ version: 1.23
+ os: xUbuntu_20.04
+
+
containerd_config:
version: 2
root: /var/lib/containerd
@@ -110,7 +116,7 @@ containerd_config:
- https://registry-1.docker.io
"10.10.31.243:5000":
endpoint:
- - http://10.10.31.243:5000
+ - "http://10.10.31.243:5000"
x509_key_pair_streaming:
tls_cert_file: ""
tls_key_file: ""
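The new crio.version / crio.os defaults look like the usual inputs for the Kubic OBS CRI-O apt repositories on Ubuntu. If the role consumes them that way (an assumption; the consuming task is outside this diff), the repository would be added roughly like this:

    - name: Add CRI-O apt repository (sketch; assumes the Kubic OBS repo layout)
      ansible.builtin.apt_repository:
        repo: "deb https://download.opensuse.org/repositories/devel:/kubic:/libcontainers:/stable:/cri-o:/{{ crio.version }}/{{ crio.os }}/ /"
        state: present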