Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fully opt-in organization token support #110

Merged
merged 7 commits into from
Nov 26, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 12 additions & 2 deletions anaconda_anon_usage/patch.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from conda.auxlib.decorators import memoizedproperty
from conda.base.context import Context, ParameterLoader, PrimitiveParameter, context

from .tokens import token_string
from .tokens import system_token, token_string
from .utils import _debug


Expand All @@ -18,7 +18,17 @@ def _new_user_agent(ctx):
getattr(Context, "checked_prefix", None) or context.target_prefix or sys.prefix
)
try:
token = token_string(prefix, context.anaconda_anon_usage)
# If an organization token exists, it overrides the value of
# context.anaconda_anon_usage. For most users, this has no
# effect. But this does provide a system administrator the
# ability to enable telemetry without modifying a user's
# configuration by installing an organization token. The
# effect is similar to placing "anaconda_anon_usage: true"
# in /etc/conda/.condarc.
is_enabled = context.anaconda_anon_usage or system_token()
if is_enabled and not context.anaconda_anon_usage:
_debug("system token overriding the config setting")
jezdez marked this conversation as resolved.
Show resolved Hide resolved
token = token_string(prefix, is_enabled)
if token:
result += " " + token
except Exception: # pragma: nocover
Expand Down
55 changes: 52 additions & 3 deletions anaconda_anon_usage/tokens.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,17 @@

import sys
from collections import namedtuple
from os.path import expanduser, join
from os import environ
from os.path import expanduser, isfile, join

from conda.base import constants as c_constants

from . import __version__
from .utils import _debug, _random_token, _saved_token, cached

Tokens = namedtuple("Tokens", ("version", "client", "session", "environment"))
Tokens = namedtuple("Tokens", ("version", "client", "session", "environment", "system"))
CONFIG_DIR = expanduser("~/.conda")
ORG_TOKEN_NAME = "org_token"


@cached
Expand All @@ -24,6 +28,41 @@ def version_token():
return __version__


@cached
def system_token():
    """
    Return the system/organization token as a string, or None if no
    token is installed.

    Unlike the other tokens, this one is intended to be installed by a
    system administrator (e.g. via an MDM system), ideally in a
    read-only system location. It may be stored anywhere on the
    standard conda search path, in a file named "org_token" alongside
    where a .condarc could be found.
    """
    # Do not import SEARCH_PATH directly since we need to
    # temporarily patch it for testing
    for path in c_constants.SEARCH_PATH:
        # Only consider directories where
        # .condarc could also be found
        if not path.endswith("/.condarc"):
            continue
        parts = path.split("/")
        # Expand a leading environment-variable component (e.g. "$XDG_...");
        # skip this search-path entry if the variable is unset or empty.
        if parts[0].startswith("$"):
            parts[0] = environ.get(parts[0][1:])
            if not parts[0]:
                continue
        parts[-1] = ORG_TOKEN_NAME
        path = "/".join(parts)
        if isfile(path):
            try:
                _debug("Reading system token: %s", path)
                with open(path) as fp:
                    return fp.read().strip()
            except Exception:
                # Best effort: an unreadable token behaves as if no
                # token were installed.
                _debug("Unable to read system token")
                return
    _debug("No system token found")


@cached
def client_token():
"""
Expand Down Expand Up @@ -65,7 +104,11 @@ def all_tokens(prefix=None):
Fields: version, client, session, environment
"""
return Tokens(
version_token(), client_token(), session_token(), environment_token(prefix)
version_token(),
client_token(),
session_token(),
environment_token(prefix),
system_token(),
)


Expand All @@ -84,8 +127,14 @@ def token_string(prefix=None, enabled=True):
parts.append("s/" + values.session)
if values.environment:
parts.append("e/" + values.environment)
if values.system:
parts.append("o/" + values.system)
mcg1969 marked this conversation as resolved.
Show resolved Hide resolved
else:
_debug("anaconda_anon_usage disabled by config")
result = " ".join(parts)
_debug("Full client token: %s", result)
return result


if __name__ == "__main__":
print(token_string())
4 changes: 2 additions & 2 deletions anaconda_anon_usage/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ def _saved_token(fpath, what, must_exist=None):
_debug("Unexpected error reading: %s\n %s", fpath, exc, error=True)
if len(client_token) < 22:
if len(client_token) > 0:
_debug("Generating longer token")
_debug("Generating longer %s token", what)
client_token = _random_token(what)
status = _write_attempt(must_exist, fpath, client_token, what[0] in WRITE_CHAOS)
if status == WRITE_FAIL:
Expand All @@ -183,6 +183,6 @@ def _saved_token(fpath, what, must_exist=None):
elif status == WRITE_DEFER:
# If the environment has not yet been created we need
# to defer the token write until later.
_debug("Deferring token write")
_debug("Deferring %s token write", what)
DEFERRED.append((must_exist, fpath, client_token, what))
return client_token
21 changes: 21 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
import tempfile
from os import remove
from os.path import join

import pytest
from conda.base import constants as c_constants
from conda.base.context import Context, context

from anaconda_anon_usage import tokens, utils
Expand All @@ -12,6 +14,25 @@ def aau_token_path():
return join(tokens.CONFIG_DIR, "aau_token")


@pytest.fixture
def system_token():
    """Install a random org token on a temporarily patched search path.

    Yields the token value; the original SEARCH_PATH is restored on
    teardown.
    """
    with tempfile.TemporaryDirectory() as tdir:
        tdir = tdir.replace("\\", "/")
        saved_path = c_constants.SEARCH_PATH
        extra_entries = (
            "/tmp/fake/condarc.d/",
            "%s/.condarc" % tdir,
            "%s/condarc" % tdir,
            "%s/condarc.d/" % tdir,
        )
        c_constants.SEARCH_PATH = extra_entries + saved_path
        token_value = utils._random_token()
        with open("%s/org_token" % tdir, "w") as fp:
            fp.write(token_value)
        yield token_value
        c_constants.SEARCH_PATH = saved_path


@pytest.fixture(autouse=True)
def token_cleanup(request, aau_token_path):
def _remove():
Expand Down
26 changes: 19 additions & 7 deletions tests/unit/test_tokens.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ def test_token_string():
assert "c/" in token_string
assert "s/" in token_string
assert "e/" in token_string
assert "o/" not in token_string


def test_token_string_disabled():
Expand All @@ -33,43 +34,54 @@ def test_token_string_disabled():
assert "c/" not in token_string
assert "s/" not in token_string
assert "e/" not in token_string
assert "o/" not in token_string


def test_token_string_no_client_token(monkeypatch):
def test_token_string_with_system(system_token):
    # With an org token installed, the "o/" field must carry its value.
    result = tokens.token_string()
    assert "o/" + system_token in result


def test_token_string_no_client_token(monkeypatch, system_token):
    # Simulate a client token that can be neither read nor written:
    # the "c/" field should be dropped while all other fields remain.
    def _mock_saved_token(*args, **kwargs):
        return ""

    monkeypatch.setattr(tokens, "environment_token", lambda prefix: "env_token")
    monkeypatch.setattr(tokens, "_saved_token", _mock_saved_token)

    token_string = tokens.token_string()
    assert "c/" not in token_string
    assert "s/" in token_string
    assert "e/env_token" in token_string
    assert "o/" + system_token in token_string


def test_token_string_no_environment_token(monkeypatch, system_token):
    # Without an environment token the "e/" field is omitted entirely,
    # but the system/org token is still emitted.
    monkeypatch.setattr(tokens, "environment_token", lambda prefix: "")

    token_string = tokens.token_string()
    assert "c/" in token_string
    assert "s/" in token_string
    assert "e/" not in token_string
    assert "o/" + system_token in token_string


def test_token_string_full_readonly(monkeypatch, system_token):
    # With both the client and environment tokens unreadable and
    # unwritable (chaos on "c" and "e"), only the session and
    # organization fields survive.
    monkeypatch.setattr(utils, "READ_CHAOS", "ce")
    monkeypatch.setattr(utils, "WRITE_CHAOS", "ce")
    token_string = tokens.token_string()
    assert "c/" not in token_string
    assert "s/" in token_string
    assert "e/" not in token_string
    assert "o/" + system_token in token_string


def test_token_string_env_readonly(monkeypatch, system_token):
    # Only the environment token is unreadable/unwritable (chaos on
    # "e"); the client, session, and organization fields must still
    # be emitted.
    monkeypatch.setattr(utils, "READ_CHAOS", "e")
    monkeypatch.setattr(utils, "WRITE_CHAOS", "e")

    token_string = tokens.token_string()
    assert "c/" in token_string
    assert "s/" in token_string
    assert "e/" not in token_string
    assert "o/" + system_token in token_string
Loading