Skip to content

Commit

Permalink
Merge pull request #1213 from HubSpot/logfetch_bug_fixes
Browse files Browse the repository at this point in the history
Logfetch bug fixes
  • Loading branch information
ssalinas authored Aug 12, 2016
2 parents 7a509ed + 95bc97f commit 4aab7cc
Show file tree
Hide file tree
Showing 6 changed files with 39 additions and 9 deletions.
1 change: 1 addition & 0 deletions scripts/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ Two commands exist for downloading logs.
|-L, --skip-live|Don't search/download live logs|false|
|-U, --use-cache|Don't redownload live logs, prefer the cached version|false|
|--search|Run logsearch on the cache of local files (no downloading)|false|
|-i, --show-file-info|Show the parsed timestamp and file name before printing log lines, even if not in verbose mode|false|
|-V, --verbose|More verbose output|false|
|--silent|No output except for log content, overrides -V|false|

Expand Down
4 changes: 2 additions & 2 deletions scripts/logfetch/cat.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
import os
import sys
import subprocess
from logfetch_base import log
from logfetch_base import log, get_timestamp_string
from termcolor import colored

def cat_files(args, all_logs):
log('\n', args, False)
if all_logs:
all_logs.sort()
for filename in all_logs:
log('=> ' + colored(filename, 'cyan') + '\n', args, False)
log(colored(get_timestamp_string(filename) + ' => ' + filename, 'cyan') + '\n', args, not args.show_file_info)
if filename.endswith('.gz'):
cat = subprocess.Popen(['cat', filename], stdout=subprocess.PIPE)
content = subprocess.Popen(['zcat'], stdin=cat.stdout)
Expand Down
3 changes: 3 additions & 0 deletions scripts/logfetch/entrypoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,6 +147,7 @@ def fetch():
parser.add_argument("-L", "--skip-live", dest="skip_live", help="Don't download/search live logs", action='store_true')
parser.add_argument("-U", "--use-cache", dest="use_cache", help="Use cache for live logs, don't re-download them", action='store_true')
parser.add_argument("--search", dest="search", help="run logsearch on the local cache of downloaded files", action='store_true')
parser.add_argument("-i", "--show-file-info", dest='show_file_info', help="Print the file name before printing log lines", action='store_true')
parser.add_argument("-V", "--verbose", dest="verbose", help="Print more verbose output", action='store_true')
parser.add_argument("--silent", dest="silent", help="No stderr (progress, file names, etc) output", action='store_true')
parser.add_argument("-D" ,"--download-only", dest="download_only", help="Only download files, don't unzip or grep", action='store_true')
Expand Down Expand Up @@ -209,6 +210,7 @@ def search():
parser.add_argument("-p", "--file-pattern", dest="file_pattern", help="S3 uploader file pattern")
parser.add_argument("-g", "--grep", dest="grep", help="Regex to grep for (normal grep syntax) or a full grep command")
parser.add_argument("-z", "--local-zone", dest="zone", help="If specified, input times in the local time zone and convert to UTC, if not specified inputs are assumed to be UTC", action="store_true")
parser.add_argument("-i", "--show-file-info", dest='show_file_info', help="Print the file name before printing log lines", action='store_true')
parser.add_argument("-V", "--verbose", dest="verbose", help="Print more verbose output", action='store_true')
parser.add_argument("--silent", dest="silent", help="No stderr (progress, file names, etc) output", action='store_true')

Expand Down Expand Up @@ -277,6 +279,7 @@ def cat():
parser.add_argument("-U", "--use-cache", dest="use_cache", help="Use cache for live logs, don't re-download them", action='store_true')
parser.add_argument("-V", "--verbose", dest="verbose", help="Print more verbose output", action='store_true')
parser.add_argument("--silent", dest="silent", help="No stderr (progress, file names, etc) output", action='store_true')
parser.add_argument("-i", "--show-file-info", dest='show_file_info', help="Print the file name before printing log lines", action='store_true')
parser.add_argument("-D" ,"--download-only", dest="download_only", help="Only download files, don't unzip or grep", action='store_true')

args = parser.parse_args(remaining_argv)
Expand Down
4 changes: 2 additions & 2 deletions scripts/logfetch/grep.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import sys
import subprocess
from logfetch_base import log
from logfetch_base import log, get_timestamp_string
from termcolor import colored

DEFAULT_GREP_COMMAND = 'grep --color=always \'{0}\''
Expand All @@ -13,7 +13,7 @@ def grep_files(args, all_logs):
grep_cmd = grep_command(args)
log(colored('Running grep command ({0})\n'.format(grep_cmd), 'cyan'), args, False)
for filename in all_logs:
log('=> ' + colored(filename, 'cyan') + '\n', args, True)
log(colored(get_timestamp_string(filename) + ' => ' + filename, 'cyan') + '\n', args, not args.show_file_info)
content = subprocess.Popen(['cat', filename], stdout=subprocess.PIPE)
if filename.endswith('.gz'):
zcat = subprocess.Popen('zcat', stdin=content.stdout, stdout=subprocess.PIPE)
Expand Down
34 changes: 30 additions & 4 deletions scripts/logfetch/logfetch_base.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import os
import re
import sys
import gzip
import fnmatch
Expand Down Expand Up @@ -28,8 +29,11 @@ def tasks_for_requests(args):
tasks = tasks[0:args.task_count] if hasattr(args, 'task_count') else tasks
all_tasks = all_tasks + tasks
if not all_tasks:
log(colored('No tasks found, check that the request/task you are searching for exists...', 'red'), args, False)
exit(1)
if args.taskId:
log(colored('No tasks found, check that the request/task you are searching for exists...', 'red'), args, False)
exit(1)
else:
log(colored('No tasks found, will try to search at request level', 'yellow'), args, False)
return all_tasks

def log_matches(inputString, pattern):
Expand All @@ -46,7 +50,7 @@ def all_tasks_for_request(args, request):
elif len(active_tasks) == 0:
return historical_tasks
else:
return active_tasks + [h for h in historical_tasks if is_in_date_range(args, int(str(h['updatedAt'])[0:-3]))]
return active_tasks + [h for h in historical_tasks if is_task_in_date_range(args, int(str(h['updatedAt'])[0:-3]), int(str(h['taskId']['startedAt'])[0:-3]))]
else:
return active_tasks

Expand All @@ -64,7 +68,29 @@ def is_in_date_range(args, timestamp):
if args.end:
return False if (timstamp_datetime < args.start or timstamp_datetime > args.end) else True
else:
return False if timedelta.days < args.start else True
return False if timstamp_datetime < args.start else True

def is_task_in_date_range(args, start, end):
    """Return True if a task's lifetime overlaps the requested time window.

    args  -- parsed CLI args; args.start is a datetime lower bound and
             args.end is an optional (possibly falsy) datetime upper bound
    start -- one endpoint of the task's lifetime, in epoch seconds
    end   -- the other endpoint of the task's lifetime, in epoch seconds

    NOTE(review): the caller in all_tasks_for_request passes
    (updatedAt, startedAt) -- i.e. the later timestamp first -- so the two
    values are normalized with min/max rather than trusting parameter order.
    """
    first = datetime.utcfromtimestamp(min(start, end))
    last = datetime.utcfromtimestamp(max(start, end))
    if args.end:
        # Standard interval-overlap test: the intervals [first, last] and
        # [args.start, args.end] intersect iff each starts before the other
        # ends. The previous branch-by-branch version wrongly excluded tasks
        # spanning the whole window (first < args.start and last > args.end)
        # and wrongly included tasks lying entirely after it.
        return first <= args.end and last >= args.start
    # No upper bound: keep any task still alive at or after args.start.
    return last >= args.start

def get_timestamp_string(filename):
    """Render the millisecond epoch stamp embedded in *filename* as a
    human-readable UTC datetime string.

    Looks for a "-<13 digits>-" component (the Singularity log-name
    convention); uses the last such component when several are present.
    Returns "" when the name carries no such stamp.
    """
    found = re.findall(r"-\d{13}-", filename)
    if not found:
        return ""
    # Strip the surrounding dashes, then drop milliseconds down to seconds.
    millis = int(found[-1].strip("-"))
    return str(datetime.utcfromtimestamp(millis // 1000))

def update_progress_bar(progress, goal, progress_type, silent):
bar_length = 30
Expand Down
2 changes: 1 addition & 1 deletion scripts/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@

setup(
name='singularity-logfetch',
version='0.26.1',
version='0.27.0',
description='Singularity log fetching and searching',
author="HubSpot",
author_email='singularity-users@googlegroups.com',
Expand Down

0 comments on commit 4aab7cc

Please sign in to comment.