Sy/relative imports2 #20467

Closed · wants to merge 8 commits
1 change: 0 additions & 1 deletion aerospike/datadog_checks/aerospike/check.py
@@ -7,7 +7,6 @@


class AerospikeCheckV2(OpenMetricsBaseCheckV2):

__NAMESPACE__ = 'aerospike'

DEFAULT_METRIC_LIMIT = 0
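Most hunks in this PR follow the pattern shown above: a single blank line sitting directly under a block opener (a class line, a def, a with) is dropped, so the first statement of the block now follows immediately. A minimal sketch of the resulting style, using placeholder names rather than the Aerospike check's real base class (attributing the change to a specific formatter release would be an assumption):

# Placeholder class illustrating the blank-line style after this change:
# no empty line between the class statement and its first attribute.
class ExampleCheck:
    NAMESPACE = 'example'

    DEFAULT_METRIC_LIMIT = 0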
1 change: 0 additions & 1 deletion airflow/tests/compose/dags/tutorial.py
@@ -39,7 +39,6 @@
catchup=False,
tags=["example"],
) as dag:

# t1, t2 and t3 are examples of tasks created by instantiating operators
t1 = BashOperator(
task_id="print_date",
9 changes: 6 additions & 3 deletions airflow/tests/conftest.py
@@ -43,9 +43,12 @@ def dd_environment(instance):
build=True,
conditions=[CheckEndpoints(URL + "/api/v1/health", attempts=120)],
):
yield instance, {
'docker_volumes': ['{}/datadog.yaml:/etc/datadog-agent/datadog.yaml'.format(temp_dir)],
}
yield (
instance,
{
'docker_volumes': ['{}/datadog.yaml:/etc/datadog-agent/datadog.yaml'.format(temp_dir)],
},
)


@pytest.fixture(scope='session')
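The conftest hunk above only wraps the yielded value in parentheses so the tuple can be split across lines; the fixture yields exactly the same (instance, dict) pair. A self-contained check of that equivalence with placeholder values, not the real fixture data:

def plain():
    yield "instance", {"docker_volumes": ["/tmp/datadog.yaml:/etc/datadog-agent/datadog.yaml"]}


def parenthesized():
    yield (
        "instance",
        {"docker_volumes": ["/tmp/datadog.yaml:/etc/datadog-agent/datadog.yaml"]},
    )


# `yield a, b` and `yield (a, b)` build the same tuple; the parentheses only
# let the expression be formatted across multiple lines.
assert next(plain()) == next(parenthesized())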
3 changes: 0 additions & 3 deletions airflow/tests/test_unit.py
@@ -31,7 +31,6 @@ def test_service_checks_healthy_exp(aggregator, json_resp, expected_healthy_stat
check = AirflowCheck('airflow', common.FULL_CONFIG, [instance])

with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value=None):

with mock.patch('datadog_checks.base.utils.http.requests') as req:
mock_resp = mock.MagicMock(status_code=200)
mock_resp.json.side_effect = [json_resp]
@@ -60,7 +59,6 @@ def test_service_checks_healthy_stable(
check = AirflowCheck('airflow', common.FULL_CONFIG, [instance])

with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'):

with mock.patch('datadog_checks.base.utils.http.requests') as req:
mock_resp = mock.MagicMock(status_code=200)
mock_resp.json.side_effect = [
@@ -100,7 +98,6 @@ def test_dag_task_ongoing_duration(aggregator, task_instance):
check = AirflowCheck('airflow', common.FULL_CONFIG, [instance])

with mock.patch('datadog_checks.airflow.airflow.AirflowCheck._get_version', return_value='2.6.2'):

with mock.patch('datadog_checks.base.utils.http.requests') as req:
mock_resp = mock.MagicMock(status_code=200)
mock_resp.json.side_effect = [
2 changes: 1 addition & 1 deletion apache/tests/test_apache.py
@@ -38,7 +38,7 @@ def test_no_metrics_failure(aggregator, check):
check.check(NO_METRIC_CONFIG)

assert str(excinfo.value) == (
"No metrics were fetched for this instance. Make sure that http://localhost:18180 " "is the proper url."
"No metrics were fetched for this instance. Make sure that http://localhost:18180 is the proper url."
)

sc_tags = ['apache_host:localhost', 'port:18180']
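The apache test change merges two adjacent string literals, an implicit concatenation left behind by an earlier line wrap, into one literal; the expected error message is unchanged. A quick self-contained check:

# Adjacent string literals are concatenated at compile time, so the merged
# literal equals the old two-part form character for character.
before = (
    "No metrics were fetched for this instance. Make sure that http://localhost:18180 " "is the proper url."
)
after = "No metrics were fetched for this instance. Make sure that http://localhost:18180 is the proper url."
assert before == after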
1 change: 0 additions & 1 deletion arangodb/datadog_checks/arangodb/check.py
@@ -21,7 +21,6 @@ class ArangodbCheck(OpenMetricsBaseCheckV2, ConfigMixin):
SERVER_TAGS = {'mode': SERVER_MODE_ENDPOINT, 'id': SERVER_ID_ENDPOINT}

def __init__(self, name, init_config, instances):

super(ArangodbCheck, self).__init__(name, init_config, instances)
self.openmetrics_endpoint = self.instance.get('openmetrics_endpoint')
parsed_endpoint = urlparse(self.openmetrics_endpoint)
2 changes: 0 additions & 2 deletions argo_workflows/tests/test_unit.py
@@ -91,7 +91,6 @@ def instance():
],
)
def test_check_with_fixtures(dd_run_check, aggregator, instance, mock_http_response, fixture_file, description):

mock_http_response(file_path=fixture_file)
check = ArgoWorkflowsCheck('argo_workflows', {}, [instance])
dd_run_check(check)
@@ -100,7 +99,6 @@ def test_check_with_fixtures(dd_run_check, aggregator, instance, mock_http_respo
aggregator.assert_metric(f'argo_workflows.{m_name}', metric_type=m_type)

if fixture_file == 'tests/fixtures/metricsv3-6+.txt':

for m_name, m_type in V3_6_METRICS:
aggregator.assert_metric(f'argo_workflows.{m_name}', metric_type=m_type)

1 change: 0 additions & 1 deletion aspdotnet/datadog_checks/aspdotnet/aspdotnet.py
@@ -11,7 +11,6 @@

class AspdotnetCheck(PDHBaseCheck):
def __new__(cls, name, init_config, instances):

if not is_affirmative(instances[0].get('use_legacy_check_version', False)):
return AspdotnetCheckV2(name, init_config, instances)
else:
1 change: 0 additions & 1 deletion aws_neuron/datadog_checks/aws_neuron/check.py
@@ -11,7 +11,6 @@ class AwsNeuronCheck(OpenMetricsBaseCheckV2):
DEFAULT_METRIC_LIMIT = 0

def __init__(self, name, init_config, instances=None):

super(AwsNeuronCheck, self).__init__(
name,
init_config,
3 changes: 1 addition & 2 deletions azure_iot_edge/tests/common.py
@@ -346,8 +346,7 @@

E2E_METRICS = (
# All metrics...
{name for name, _ in MODULE_METRICS}
.union(name for name, _, _ in AGENT_METRICS)
{name for name, _ in MODULE_METRICS}.union(name for name, _, _ in AGENT_METRICS)
.union(name for name, _ in HUB_METRICS)
# ... Except a few that don't get emitted by default.
.difference(
2 changes: 0 additions & 2 deletions btrfs/datadog_checks/btrfs/btrfs.py
@@ -109,7 +109,6 @@ def get_usage(self, mountpoint):
results = []

with FileDescriptor(mountpoint) as fd:

# Get the struct size needed
# https://github.com/spotify/linux/blob/master/fs/btrfs/ioctl.h#L46-L50
ret = sized_array(TWO_LONGS_STRUCT.size)
@@ -135,7 +134,6 @@ def get_unallocated_space(self, mountpoint):
unallocated_bytes = 0

with FileDescriptor(mountpoint) as fd:

# Retrieve the fs info to get the number of devices and max device id
fs_info = sized_array(BTRFS_FS_INFO_STRUCT.size)
fcntl.ioctl(fd, BTRFS_IOC_FS_INFO, fs_info)
2 changes: 0 additions & 2 deletions btrfs/tests/test_btrfs.py
@@ -13,7 +13,6 @@


def mock_get_usage():

return [
(1, 9672065024, 9093722112),
(34, 33554432, 16384),
@@ -25,7 +24,6 @@ def mock_get_usage():
# Just return a single device so the psutil portion of the check doesn't fail
# The real data to check against is in mock_get_usage.
def get_mock_devices():

device_tuple = collections.namedtuple('device_tuple', 'device mountpoint fstype opts')

return [device_tuple(device='/dev/disk1', mountpoint='/', fstype='btrfs', opts='local,multilabel')]
4 changes: 1 addition & 3 deletions cacti/datadog_checks/cacti/cacti.py
@@ -208,9 +208,7 @@ def _in_whitelist(rrd):
AND dl.snmp_index = hsc.snmp_index
WHERE dt.data_source_path IS NOT NULL
AND dt.data_source_path != ''
AND ({} OR hsc.field_name is NULL) """.format(
and_parameters
)
AND ({} OR hsc.field_name is NULL) """.format(and_parameters)

c.execute(rrd_query)
res = []
1 change: 0 additions & 1 deletion calico/datadog_checks/calico/check.py
@@ -11,7 +11,6 @@ class CalicoCheck(OpenMetricsBaseCheckV2):
DEFAULT_METRIC_LIMIT = 0

def __init__(self, name, init_config, instances=None):

super(CalicoCheck, self).__init__(
name,
init_config,
14 changes: 8 additions & 6 deletions calico/tests/conftest.py
@@ -41,12 +41,14 @@ def setup_calico():

@pytest.fixture(scope='session')
def dd_environment():

with kind_run(
conditions=[setup_calico], kind_config=path.join(HERE, 'kind', 'kind-calico.yaml'), sleep=10
) as kubeconfig, port_forward(kubeconfig, 'kube-system', 9091, 'service', 'felix-metrics-svc') as (
calico_host,
calico_port,
with (
kind_run(
conditions=[setup_calico], kind_config=path.join(HERE, 'kind', 'kind-calico.yaml'), sleep=10
) as kubeconfig,
port_forward(kubeconfig, 'kube-system', 9091, 'service', 'felix-metrics-svc') as (
calico_host,
calico_port,
),
):
endpoint = 'http://{}:{}/metrics'.format(calico_host, calico_port)

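The calico conftest hunk rewrites two chained context managers into a single parenthesized with block; behaviour is identical, but the parenthesized form needs an interpreter that supports it (officially part of the grammar from Python 3.10). A runnable sketch with stand-in context managers, since kind_run and port_forward are the repo's own helpers:

from contextlib import contextmanager


@contextmanager
def fake_kind_run():
    yield "/tmp/kubeconfig"  # stand-in for the kubeconfig path kind_run yields


@contextmanager
def fake_port_forward(kubeconfig):
    yield ("127.0.0.1", 9091)  # stand-in for the (host, port) pair port_forward yields


# One `with`, several managers, each with its own `as` target; equivalent to
# nesting the two statements.
with (
    fake_kind_run() as kubeconfig,
    fake_port_forward(kubeconfig) as (calico_host, calico_port),
):
    print('http://{}:{}/metrics'.format(calico_host, calico_port))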
1 change: 0 additions & 1 deletion calico/tests/test_calico.py
@@ -13,7 +13,6 @@

@pytest.mark.unit
def test_check(aggregator, dd_run_check, mock_http_response):

mock_http_response(file_path=get_fixture_path('calico.txt'))
check = CalicoCheck('calico', {}, [common.MOCK_CALICO_INSTANCE])
dd_run_check(check)
@@ -29,7 +29,6 @@


class CassandraNodetoolCheck(AgentCheck):

datacenter_name_re = re.compile('^Datacenter: (.*)')
# 1.
# -- Address Load Tokens Owns Host ID Rack
@@ -88,7 +87,6 @@ def check(self, _):
percent_total_by_dc = defaultdict(float)
# Send the stats per node and compute the stats per datacenter
for node in nodes:

node_tags = [
'node_address:%s' % node['address'],
'node_id:%s' % node['id'],
3 changes: 0 additions & 3 deletions ceph/tests/test_unit.py
@@ -118,7 +118,6 @@ def test_luminous_osd_full_metrics(_, aggregator, dd_run_check):

@mock.patch("datadog_checks.ceph.Ceph._collect_raw", return_value=mock_data("raw.json"))
def test_tagged_metrics(_, aggregator, dd_run_check):

ceph_check = Ceph(CHECK_NAME, {}, [copy.deepcopy(BASIC_CONFIG)])
dd_run_check(ceph_check)

@@ -137,7 +136,6 @@ def test_tagged_metrics(_, aggregator, dd_run_check):

@mock.patch("datadog_checks.ceph.Ceph._collect_raw", return_value=mock_data("raw2.json"))
def test_osd_perf_with_osdstats(_, aggregator, dd_run_check):

ceph_check = Ceph(CHECK_NAME, {}, [copy.deepcopy(BASIC_CONFIG)])
dd_run_check(ceph_check)

@@ -150,7 +148,6 @@ def test_osd_perf_with_osdstats(_, aggregator, dd_run_check):

@mock.patch("datadog_checks.ceph.Ceph._collect_raw", return_value=mock_data("ceph_10.2.2.json"))
def test_osd_status_metrics(_, aggregator, dd_run_check):

ceph_check = Ceph(CHECK_NAME, {}, [copy.deepcopy(BASIC_CONFIG)])
dd_run_check(ceph_check)

1 change: 0 additions & 1 deletion cert_manager/tests/conftest.py
@@ -20,7 +20,6 @@


def setup_cert_manager():

# Deploy Cert Manager
run_command(
[
1 change: 0 additions & 1 deletion cisco_aci/datadog_checks/cisco_aci/api.py
@@ -85,7 +85,6 @@ def make_request(self, path):


class Api:

wrapper_factory = SessionWrapper

def __init__(
1 change: 0 additions & 1 deletion cisco_aci/datadog_checks/cisco_aci/cisco.py
@@ -20,7 +20,6 @@


class CiscoACICheck(AgentCheck):

HTTP_CONFIG_REMAPPER = {'ssl_verify': {'name': 'tls_verify'}, 'pwd': {'name': 'password'}}
HA_SUPPORTED = True

10 changes: 5 additions & 5 deletions cisco_aci/tests/test_fabric.py
@@ -198,11 +198,11 @@ def assert_fabric_port_ingr_metrics(aggregator):
aggregator.assert_metric(name=metric_name, value=90202911073.0, tags=interface_tags_201_eth2, hostname=hn201)

metric_name = 'cisco_aci.fabric.port.ingr_bytes.unicast'
aggregator.assert_metric(name=metric_name, value=50443812.0, tags=interface_tags_101_eth1, hostname=hn101),
aggregator.assert_metric(name=metric_name, value=70147142.0, tags=interface_tags_101_eth2, hostname=hn101),
aggregator.assert_metric(name=metric_name, value=32704715.0, tags=interface_tags_102_eth1, hostname=hn102),
aggregator.assert_metric(name=metric_name, value=23770059.0, tags=interface_tags_102_eth2, hostname=hn102),
aggregator.assert_metric(name=metric_name, value=105702610.0, tags=interface_tags_201_eth1, hostname=hn201),
aggregator.assert_metric(name=metric_name, value=50443812.0, tags=interface_tags_101_eth1, hostname=hn101)
aggregator.assert_metric(name=metric_name, value=70147142.0, tags=interface_tags_101_eth2, hostname=hn101)
aggregator.assert_metric(name=metric_name, value=32704715.0, tags=interface_tags_102_eth1, hostname=hn102)
aggregator.assert_metric(name=metric_name, value=23770059.0, tags=interface_tags_102_eth2, hostname=hn102)
aggregator.assert_metric(name=metric_name, value=105702610.0, tags=interface_tags_201_eth1, hostname=hn201)
aggregator.assert_metric(name=metric_name, value=29485355.0, tags=interface_tags_201_eth2, hostname=hn201)

metric_name = 'cisco_aci.fabric.port.ingr_bytes.unicast.cum'
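The commas removed in the test_fabric hunk were not extra call arguments: a trailing comma after a complete call turns the whole line into a one-element tuple expression that is built and discarded. The assertions still ran, but linters flag the pattern. A small illustration with a hypothetical helper, not the aggregator fixture:

def assert_metric(name, value):
    # Hypothetical stand-in for aggregator.assert_metric; just returns True.
    return True


with_comma = assert_metric("cisco_aci.fabric.port.ingr_bytes.unicast", 50443812.0),  # stray comma
without_comma = assert_metric("cisco_aci.fabric.port.ingr_bytes.unicast", 50443812.0)

assert with_comma == (True,)      # the comma wrapped the return value in a tuple
assert without_comma is True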
3 changes: 1 addition & 2 deletions citrix_hypervisor/tests/test_lab.py
@@ -50,8 +50,7 @@ def test_lab(aggregator, dd_run_check):
"""
if not is_affirmative(os.environ.get('TEST_CITRIX_RUN_LAB')):
pytest.skip(
"Skipped! Set TEST_CITRIX_RUN_LAB to run this test. "
"TEST_CITRIX_USER and TEST_CITRIX_PASS must also be set."
"Skipped! Set TEST_CITRIX_RUN_LAB to run this test. TEST_CITRIX_USER and TEST_CITRIX_PASS must also be set."
)

username = os.environ['TEST_CITRIX_USER']
36 changes: 20 additions & 16 deletions cloud_foundry_api/tests/test_cloud_foundry_api.py
@@ -97,9 +97,10 @@ def test_check(_, __, ___, aggregator, instance, dd_events, dd_run_check):
@mock.patch.object(CloudFoundryApiCheck, "get_spaces", return_value={"space_id": "space_name"})
def test_get_events(_, __, ___, instance, dd_events):
scroll_events_mock = mock.MagicMock(return_value=dd_events)
with mock.patch.object(CloudFoundryApiCheck, "scroll_events", scroll_events_mock), mock.patch.object(
CloudFoundryApiCheck, "get_oauth_token"
) as get_oauth_token_mock:
with (
mock.patch.object(CloudFoundryApiCheck, "scroll_events", scroll_events_mock),
mock.patch.object(CloudFoundryApiCheck, "get_oauth_token") as get_oauth_token_mock,
):
check_v2 = CloudFoundryApiCheck('cloud_foundry_api', {}, [instance])
check_v2._api_version = "v2"
check_v3 = CloudFoundryApiCheck('cloud_foundry_api', {}, [instance])
@@ -511,7 +512,6 @@ def test_build_dd_event(_, __, ___, instance):
@mock.patch("datadog_checks.cloud_foundry_api.cloud_foundry_api.get_next_url", side_effect=["next", ""])
@mock.patch.object(CloudFoundryApiCheck, "http")
def test_scroll_api_pages(http_mock, get_next_url_mock, __, ___, ____, aggregator, instance):

check = CloudFoundryApiCheck('cloud_foundry_api', {}, [instance])

# When exhausting all pages
@@ -613,17 +613,19 @@ def test_get_orgs(_, __, instance, orgs_v2_p1, orgs_v2_p2, orgs_v3_p1, orgs_v3_p
"321c58b0-777b-472f-812e-c08c53817074": "org_3",
"0ba4c8cb-9e71-4d6e-b6ff-74e301ed6467": "org_4",
}
with mock.patch.object(
CloudFoundryApiCheck, "scroll_api_pages", return_value=[orgs_v2_p1, orgs_v2_p2]
), mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"):
with (
mock.patch.object(CloudFoundryApiCheck, "scroll_api_pages", return_value=[orgs_v2_p1, orgs_v2_p2]),
mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"),
):
check = CloudFoundryApiCheck('cloud_foundry_api', {}, [instance])
check._api_version = "v2"

assert check.get_orgs() == expected_orgs

with mock.patch.object(
CloudFoundryApiCheck, "scroll_api_pages", return_value=[orgs_v3_p1, orgs_v3_p2]
), mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"):
with (
mock.patch.object(CloudFoundryApiCheck, "scroll_api_pages", return_value=[orgs_v3_p1, orgs_v3_p2]),
mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"),
):
check = CloudFoundryApiCheck('cloud_foundry_api', {}, [instance])
check._api_version = "v3"

@@ -639,17 +641,19 @@ def test_get_spaces(_, __, instance, spaces_v2_p1, spaces_v2_p2, spaces_v3_p1, s
"d5d005a4-0320-4daa-ac0a-81f8dcd00fe0": "space_3",
"8c7e64bb-0bf8-4a7a-92e1-2fe06e7ec793": "space_4",
}
with mock.patch.object(
CloudFoundryApiCheck, "scroll_api_pages", return_value=[spaces_v2_p1, spaces_v2_p2]
), mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"):
with (
mock.patch.object(CloudFoundryApiCheck, "scroll_api_pages", return_value=[spaces_v2_p1, spaces_v2_p2]),
mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"),
):
check = CloudFoundryApiCheck('cloud_foundry_api', {}, [instance])
check._api_version = "v2"

assert check.get_spaces() == expected_spaces

with mock.patch.object(
CloudFoundryApiCheck, "scroll_api_pages", return_value=[spaces_v3_p1, spaces_v3_p2]
), mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"):
with (
mock.patch.object(CloudFoundryApiCheck, "scroll_api_pages", return_value=[spaces_v3_p1, spaces_v3_p2]),
mock.patch.object(CloudFoundryApiCheck, "get_oauth_token"),
):
check = CloudFoundryApiCheck('cloud_foundry_api', {}, [instance])
check._api_version = "v3"

14 changes: 8 additions & 6 deletions cloudera/datadog_checks/cloudera/api/api_v7.py
@@ -52,9 +52,10 @@ def _collect_clusters(self):
# Use len(read_clusters_response.items) * 2 workers since
# for each cluster, we are executing 2 tasks in parallel.
if len(discovered_clusters) > 0:
with ThreadPoolExecutor(max_workers=len(discovered_clusters) * 3) as executor, raising_submitter(
executor
) as submit:
with (
ThreadPoolExecutor(max_workers=len(discovered_clusters) * 3) as executor,
raising_submitter(executor) as submit,
):
for pattern, cluster_name, item, cluster_config in discovered_clusters:
self._log.debug(
"Discovered cluster: [pattern:%s, cluster_name:%s, config:%s]",
@@ -136,9 +137,10 @@ def _collect_hosts(self, cluster_name, config):
# Use len(discovered_hosts) * 4 workers since
# for each host, we are executing 4 tasks in parallel.
if len(discovered_hosts) > 0:
with ThreadPoolExecutor(max_workers=len(discovered_hosts) * 4) as executor, raising_submitter(
executor
) as submit:
with (
ThreadPoolExecutor(max_workers=len(discovered_hosts) * 4) as executor,
raising_submitter(executor) as submit,
):
for pattern, key, item, config in discovered_hosts:
self._log.debug(
"discovered host: [pattern:%s, key:%s, item:%s, config:%s]", pattern, key, item, config
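The Cloudera hunks apply the same rewrite as the test files above: the ThreadPoolExecutor and the raising_submitter wrapper now share one parenthesized with statement. A runnable sketch that swaps in a placeholder for the repo's raising_submitter helper:

from concurrent.futures import ThreadPoolExecutor
from contextlib import contextmanager


@contextmanager
def submitter(executor):
    # Placeholder for raising_submitter: simply exposes the executor's submit.
    yield executor.submit


with (
    ThreadPoolExecutor(max_workers=4) as executor,
    submitter(executor) as submit,
):
    futures = [submit(pow, 2, n) for n in range(4)]
    print([f.result() for f in futures])  # [1, 2, 4, 8]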