Release for v0.7.8 #912

Merged 7 commits on Jun 17, 2020
2 changes: 1 addition & 1 deletion .circleci/config.yml
@@ -10,7 +10,7 @@ jobs:
ignore:
- gh-pages
docker:
-  - image: googleapis/nox:0.17.0
+  - image: googleapis/nox:0.18.2
- image: mysql:5.7
environment:
MYSQL_ROOT_HOST: "%"
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -2,8 +2,16 @@

## Unreleased

## 0.7.8
Released 2020-06-17

- Updated `azure` module
([#903](https://github.com/census-instrumentation/opencensus-python/pull/903),
[#902](https://github.com/census-instrumentation/opencensus-python/pull/902))

## 0.7.7
Released 2020-02-04

- Updated `azure` module
([#837](https://github.com/census-instrumentation/opencensus-python/pull/837),
[#845](https://github.com/census-instrumentation/opencensus-python/pull/845),
4 changes: 4 additions & 0 deletions README.rst
@@ -2,11 +2,15 @@ OpenCensus - A stats collection and distributed tracing framework
=================================================================

|gitter|
|travisci|
|circleci|
|pypi|
|compat_check_pypi|
|compat_check_github|


.. |travisci| image:: https://travis-ci.org/census-instrumentation/opencensus-python.svg?branch=master
:target: https://travis-ci.org/census-instrumentation/opencensus-python
.. |circleci| image:: https://circleci.com/gh/census-instrumentation/opencensus-python.svg?style=shield
:target: https://circleci.com/gh/census-instrumentation/opencensus-python
.. |gitter| image:: https://badges.gitter.im/census-instrumentation/lobby.svg
11 changes: 10 additions & 1 deletion contrib/opencensus-ext-azure/CHANGELOG.md
@@ -2,8 +2,17 @@

## Unreleased

## 1.0.3
Released 2020-06-17

- Change default path of local storage
([#903](https://github.com/census-instrumentation/opencensus-python/pull/903))
- Add support to initialize azure exporters with proxies
([#902](https://github.com/census-instrumentation/opencensus-python/pull/902))


## 1.0.2
-Released 2020-02-03
+Released 2020-02-04

- Add local storage and retry logic for Azure Metrics Exporter
([#845](https://github.com/census-instrumentation/opencensus-python/pull/845))
@@ -13,15 +13,17 @@
# limitations under the License.

import os
-import sys
+import tempfile

from opencensus.ext.azure.common.protocol import BaseObject

INGESTION_ENDPOINT = 'ingestionendpoint'
INSTRUMENTATION_KEY = 'instrumentationkey'
TEMPDIR_PREFIX = "opencensus-python-"


def process_options(options):
# Connection string/ikey
code_cs = parse_connection_string(options.connection_string)
code_ikey = options.instrumentation_key
env_cs = parse_connection_string(
@@ -46,6 +48,17 @@ def process_options(options):
or 'https://dc.services.visualstudio.com'
options.endpoint = endpoint + '/v2/track'

# storage path
if options.storage_path is None:
TEMPDIR_SUFFIX = options.instrumentation_key or ""
options.storage_path = os.path.join(
tempfile.gettempdir(),
TEMPDIR_PREFIX + TEMPDIR_SUFFIX
)

if options.proxies is None:
options.proxies = '{}'


def parse_connection_string(connection_string):
if connection_string is None:
@@ -95,15 +108,10 @@ def __init__(self, *args, **kwargs):
logging_sampling_rate=1.0,
max_batch_size=100,
minimum_retry_interval=60, # minimum retry interval in seconds
-proxy=None,
+proxies=None,  # string mapping url schemes to proxy urls
storage_maintenance_period=60,
-storage_max_size=100*1024*1024,
-storage_path=os.path.join(
-    os.path.expanduser('~'),
-    '.opencensus',
-    '.azure',
-    os.path.basename(sys.argv[0]) or '.console',
-),
+storage_max_size=50*1024*1024,  # 50MiB
+storage_path=None,
storage_retention_period=7*24*60*60,
timeout=10.0, # networking timeout in seconds
)
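For context, a minimal sketch (not part of this diff) of where the new default resolves: with storage_path left as None, process_options() now builds the path from the system temp directory and the instrumentation key. The key below is a placeholder.

import os
import tempfile

TEMPDIR_PREFIX = "opencensus-python-"
ikey = "12345678-1234-5678-abcd-12345678abcd"  # placeholder key

# Mirrors the storage-path branch of process_options() above: blobs now
# land under the system temp dir instead of ~/.opencensus/.azure/...
default_path = os.path.join(tempfile.gettempdir(), TEMPDIR_PREFIX + ikey)
print(default_path)  # e.g. /tmp/opencensus-python-12345678-... on Linux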
@@ -66,6 +66,7 @@ def __init__(self, src, dst):

def run(self): # pragma: NO COVER
# Indicate that this thread is an exporter thread.
# Used to suppress tracking of requests in this thread.
execution_context.set_is_exporter(True)
src = self.src
dst = self.dst
@@ -1,10 +1,13 @@
import datetime
import json
-import random
+import logging
import os
+import random

from opencensus.common.schedule import PeriodicTask

logger = logging.getLogger(__name__)


def _fmt(timestamp):
return timestamp.strftime('%Y-%m-%dT%H%M%S.%f')
@@ -22,25 +25,23 @@ class LocalFileBlob(object):
def __init__(self, fullpath):
self.fullpath = fullpath

-def delete(self, silent=False):
+def delete(self):
try:
os.remove(self.fullpath)
except Exception:
-if not silent:
-    raise
+pass  # keep silent

-def get(self, silent=False):
+def get(self):
try:
with open(self.fullpath, 'r') as file:
return tuple(
json.loads(line.strip())
for line in file.readlines()
)
except Exception:
-if not silent:
-    raise
+pass  # keep silent

-def put(self, data, lease_period=0, silent=False):
+def put(self, data, lease_period=0):
try:
fullpath = self.fullpath + '.tmp'
with open(fullpath, 'w') as file:
@@ -56,8 +57,7 @@ def put(self, data, lease_period=0, silent=False):
os.rename(fullpath, self.fullpath)
return self
except Exception:
-if not silent:
-    raise
+pass  # keep silent

def lease(self, period):
timestamp = _now() + _seconds(period)
@@ -77,7 +77,7 @@ class LocalFileStorage(object):
def __init__(
self,
path,
-max_size=100*1024*1024,  # 100MB
+max_size=50*1024*1024,  # 50MiB
maintenance_period=60, # 1 minute
retention_period=7*24*60*60, # 7 days
write_timeout=60, # 1 minute
@@ -87,11 +87,11 @@ def __init__(
self.maintenance_period = maintenance_period
self.retention_period = retention_period
self.write_timeout = write_timeout
-self._maintenance_routine(silent=False)
+# Run maintenance routine once upon instantiating
+self._maintenance_routine()
self._maintenance_task = PeriodicTask(
interval=self.maintenance_period,
function=self._maintenance_routine,
-kwargs={'silent': True},
)
self._maintenance_task.daemon = True
self._maintenance_task.start()
@@ -106,19 +106,18 @@ def __enter__(self):
def __exit__(self, type, value, traceback):
self.close()

-def _maintenance_routine(self, silent=False):
+def _maintenance_routine(self):
try:
if not os.path.isdir(self.path):
os.makedirs(self.path)
except Exception:
-if not silent:
-    raise
+# Race case will throw OSError which we can ignore
+pass
try:
for blob in self.gets():
pass
except Exception:
-if not silent:
-    raise
+pass  # keep silent

def gets(self):
now = _now()
@@ -161,12 +160,39 @@ def get(self):
pass
return None

-def put(self, data, lease_period=0, silent=False):
+def put(self, data, lease_period=0):
if not self._check_storage_size():
return None
blob = LocalFileBlob(os.path.join(
self.path,
'{}-{}.blob'.format(
_fmt(_now()),
'{:08x}'.format(random.getrandbits(32)), # thread-safe random
),
))
-return blob.put(data, lease_period=lease_period, silent=silent)
+return blob.put(data, lease_period=lease_period)

def _check_storage_size(self):
size = 0
for dirpath, dirnames, filenames in os.walk(self.path):
for f in filenames:
fp = os.path.join(dirpath, f)
# skip if it is symbolic link
if not os.path.islink(fp):
try:
size += os.path.getsize(fp)
except OSError:
logger.error(
"Path %s does not exist or is inaccessible.", fp
)
continue
if size >= self.max_size:
logger.warning(
"Persistent storage max capacity has been "
"reached. Currently at %fKB. Telemetry will be "
"lost. Please consider increasing the value of "
"'storage_max_size' in exporter config.",
size / 1024.0
)
return False
return True
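A minimal usage sketch (not part of this diff) of the new quota behavior: put() now returns None instead of raising once _check_storage_size() reports the cap has been reached. The tiny max_size is a hypothetical value chosen to make the cap easy to hit.

import tempfile
from opencensus.ext.azure.common.storage import LocalFileStorage

with LocalFileStorage(
    path=tempfile.mkdtemp(prefix="opencensus-python-demo-"),
    max_size=1024,  # hypothetical 1 KiB cap for demonstration
) as storage:
    blob = storage.put(['envelope'])  # LocalFileBlob, or None if over quota
    if blob is None:
        print('storage full; telemetry would be dropped')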
@@ -14,6 +14,7 @@

import json
import logging

import requests

logger = logging.getLogger(__name__)
@@ -25,12 +26,12 @@ def _transmit_from_storage(self):
# give a few more seconds for blob lease operation
# to reduce the chance of race (for perf consideration)
if blob.lease(self.options.timeout + 5):
-envelopes = blob.get()  # TODO: handle error
+envelopes = blob.get()
result = self._transmit(envelopes)
if result > 0:
blob.lease(result)
else:
-blob.delete(silent=True)
+blob.delete()

def _transmit(self, envelopes):
"""
@@ -40,6 +41,8 @@ def _transmit(self, envelopes):
Return the next retry time in seconds for retryable failure.
This function should never throw exception.
"""
if not envelopes:
return 0
try:
response = requests.post(
url=self.options.endpoint,
@@ -49,9 +52,15 @@
'Content-Type': 'application/json; charset=utf-8',
},
timeout=self.options.timeout,
proxies=json.loads(self.options.proxies),
)
except requests.Timeout:
logger.warning(
'Request time out. Ingestion may be backed up. Retrying.')
return self.options.minimum_retry_interval
except Exception as ex: # TODO: consider RequestException
-logger.warning('Transient client side error %s.', ex)
+logger.warning(
+    'Retrying due to transient client side error %s.', ex)
# client side error (retryable)
return self.options.minimum_retry_interval
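For reference, a minimal sketch (not part of this diff) of how the proxies option is consumed: _transmit() parses it with json.loads and passes the result straight to requests, so the value must be a JSON object mapping URL schemes to proxy URLs. The proxy address below is a placeholder.

import json
import requests

proxies = '{"https": "https://proxy.example.com:8080"}'  # placeholder

response = requests.post(
    url='https://dc.services.visualstudio.com/v2/track',
    data='[]',  # empty batch, just to show the call shape
    headers={'Content-Type': 'application/json; charset=utf-8'},
    timeout=10.0,
    proxies=json.loads(proxies),  # {'https': 'https://proxy.example.com:8080'}
)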

@@ -12,4 +12,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.

-__version__ = '1.0.2'
+__version__ = '1.0.3'
@@ -79,7 +79,7 @@ def __init__(self, src, dst):

def run(self):
# Indicate that this thread is an exporter thread.
+# Used to suppress tracking of requests in this thread.
execution_context.set_is_exporter(True)
src = self._src
dst = self._dst
while True:
@@ -19,6 +19,7 @@

class ProcessorTimeMetric(object):
NAME = "\\Processor(_Total)\\% Processor Time"

@staticmethod
def get_value():
cpu_times_percent = psutil.cpu_times_percent()
@@ -19,6 +19,7 @@

class AvailableMemoryMetric(object):
NAME = "\\Memory\\Available Bytes"

@staticmethod
def get_value():
return psutil.virtual_memory().available
@@ -24,6 +24,7 @@

class ProcessMemoryMetric(object):
NAME = "\\Process(??APP_WIN32_PROC??)\\Private Bytes"

@staticmethod
def get_value():
try:
11 changes: 11 additions & 0 deletions contrib/opencensus-ext-azure/tests/test_azure_log_exporter.py
@@ -85,6 +85,17 @@ def test_invalid_sampling_rate(self):
logging_sampling_rate=4.0,
)

def test_init_handler_with_proxies(self):
handler = log_exporter.AzureLogHandler(
instrumentation_key='12345678-1234-5678-abcd-12345678abcd',
proxies='{"https":"https://test-proxy.com"}',
)

self.assertEqual(
handler.options.proxies,
'{"https":"https://test-proxy.com"}',
)
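A minimal end-to-end sketch (not part of this diff) of the option this test exercises; the instrumentation key and proxy URL are placeholders.

import logging
from opencensus.ext.azure.log_exporter import AzureLogHandler

logger = logging.getLogger(__name__)
logger.addHandler(AzureLogHandler(
    instrumentation_key='12345678-1234-5678-abcd-12345678abcd',  # placeholder
    proxies='{"https": "https://test-proxy.com"}',  # JSON string, as above
))
logger.warning('telemetry routed through the configured proxy')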

@mock.patch('requests.post', return_value=mock.Mock())
def test_exception(self, requests_mock):
logger = logging.getLogger(self.id())