Skip to content

Commit

Permalink
chore: Replace str.format() and string concatenation in samples with …
Browse files Browse the repository at this point in the history
…f-string (#186)

* create bq table fixed

* feat: string format is adjusted

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* fix typo

* revert unrelated change

* use project_id instead of project_number

* fix typo

* use project_id instead of project_number

Co-authored-by: tetiana-karasova <tetiana.karasova@gmail.com>
Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
Co-authored-by: Anthonios Partheniou <partheniou@google.com>
  • Loading branch information
4 people authored Mar 10, 2022
1 parent 8c91a33 commit 81724cd
Show file tree
Hide file tree
Showing 16 changed files with 59 additions and 90 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -40,12 +40,10 @@ def main(bucket_name):
# TODO: Developer set the bucket_name
# bucket_name = 'user_events_bucket'

default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format(
project_id
)
default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog"

gcs_bucket = "gs://{}".format(bucket_name)
gcs_errors_bucket = "{}/error".format(gcs_bucket)
gcs_bucket = f"gs://{bucket_name}"
gcs_errors_bucket = f"{gcs_bucket}/error"
gcs_events_object = "user_events.json"

# TO CHECK ERROR HANDLING USE THE JSON WITH INVALID PRODUCT
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,9 +35,7 @@

project_id = google.auth.default()[1]

default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format(
project_id
)
default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog"


# get user events for import
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,17 +24,16 @@

project_id = google.auth.default()[1]

default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format(
project_id
)
default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog"

visitor_id = "test_visitor_id"


# get purge user event request
def get_purge_user_event_request():
purge_user_event_request = PurgeUserEventsRequest()
# TO CHECK ERROR HANDLING SET INVALID FILTER HERE:
purge_user_event_request.filter = 'visitorId="{}"'.format(visitor_id)
purge_user_event_request.filter = f'visitorId="{visitor_id}"'
purge_user_event_request.parent = default_catalog
purge_user_event_request.force = True
print("---purge user events request---")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,16 +24,15 @@

project_id = google.auth.default()[1]

default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format(
project_id
)
default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog"

visitor_id = "test_visitor_id"


# get rejoin user event request
def get_rejoin_user_event_request():
# TO CHECK THE ERROR HANDLING TRY TO PASS INVALID CATALOG:
# default_catalog = "projects/{0}/locations/global/catalogs/invalid_catalog".format(project_number)
# default_catalog = f"projects/{project_id}/locations/global/catalogs/invalid_catalog"
rejoin_user_event_request = RejoinUserEventsRequest()
rejoin_user_event_request.parent = default_catalog
rejoin_user_event_request.user_event_rejoin_scope = (
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,10 @@

project_id = google.auth.default()[1]
timestamp_ = datetime.datetime.now().timestamp().__round__()
bucket_name = "{}_events_{}".format(project_id, timestamp_)
bucket_name = f"{project_id}_events_{timestamp_}"

create_bucket(bucket_name)
upload_blob(bucket_name, "../resources/user_events.json")
upload_blob(bucket_name, "../resources/user_events_some_invalid.json")

print("\nThe gcs bucket {} was created".format(bucket_name))
print(f"\nThe gcs bucket {bucket_name} was created")
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,7 @@
from google.protobuf.timestamp_pb2 import Timestamp

project_id = google.auth.default()[1]
default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format(
project_id)
default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog"


# get user event
Expand Down Expand Up @@ -71,7 +70,7 @@ def write_user_event(visitor_id):
# purge user event
def purge_user_event(visitor_id):
purge_user_event_request = PurgeUserEventsRequest()
purge_user_event_request.filter = 'visitorId="{}"'.format(visitor_id)
purge_user_event_request.filter = f'visitorId="{visitor_id}"'
purge_user_event_request.parent = default_catalog
purge_user_event_request.force = True
purge_operation = UserEventServiceClient().purge_user_events(
Expand All @@ -93,17 +92,14 @@ def create_bucket(bucket_name: str):
print("Creating new bucket:" + bucket_name)
buckets_in_your_project = list_buckets()
if bucket_name in buckets_in_your_project:
print("Bucket {} already exists".format(bucket_name))
print(f"Bucket {bucket_name} already exists")
else:
storage_client = storage.Client()
bucket = storage_client.bucket(bucket_name)
bucket.storage_class = "STANDARD"
new_bucket = storage_client.create_bucket(bucket, location="us")
print(
"Created bucket {} in {} with storage class {}".format(
new_bucket.name, new_bucket.location, new_bucket.storage_class
)
)
f"Created bucket {new_bucket.name} in {new_bucket.location} with storage class {new_bucket.storage_class}")
return new_bucket


Expand All @@ -118,9 +114,9 @@ def delete_bucket(bucket_name: str):
blob.delete()
bucket = storage_client.get_bucket(bucket_name)
bucket.delete()
print("Bucket {} is deleted".format(bucket.name))
print(f"Bucket {bucket.name} is deleted")
else:
print("Bucket {} is not found".format(bucket_name))
print(f"Bucket {bucket_name} is not found")


def list_buckets():
Expand All @@ -137,17 +133,14 @@ def upload_blob(bucket_name, source_file_name):
"""Uploads a file to the bucket."""
# The path to your file to upload
# source_file_name = "local/path/to/file"
print("Uploading data form {} to the bucket {}".format(source_file_name,
bucket_name))
print(f"Uploading data from {source_file_name} to the bucket {bucket_name}")
storage_client = storage.Client()
bucket = storage_client.bucket(bucket_name)
object_name = re.search('resources/(.*?)$', source_file_name).group(1)
blob = bucket.blob(object_name)
blob.upload_from_filename(source_file_name)
print(
"File {} uploaded to {}.".format(
source_file_name, object_name
)
f"File {source_file_name} uploaded to {object_name}."
)


Expand Down Expand Up @@ -190,7 +183,7 @@ def delete_bq_table(dataset, table_name):
full_table_id = f"{project_id}.{dataset}.{table_name}"
bq = bigquery.Client()
bq.delete_table(full_table_id, not_found_ok=True)
print("Table '{}' is deleted.".format(full_table_id))
print(f"Table '{full_table_id}' is deleted.")


def upload_data_to_bq_table(dataset, table_name, source, schema_file_path):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,4 +21,4 @@ def update_events_timestamp(json_file):
# Write the file out again
with open(json_file, 'w') as file:
file.write(filedata)
print("The {} is updated".format(json_file))
print(f"The {json_file} is updated")
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,8 @@

project_id = google.auth.default()[1]

default_catalog = "projects/{0}/locations/global/catalogs/default_catalog".format(
project_id
)
default_catalog = f"projects/{project_id}/locations/global/catalogs/default_catalog"

visitor_id = "test_visitor_id"


Expand All @@ -51,8 +50,7 @@ def get_user_event():
# get write user event request
def get_write_event_request(user_event):
# TO CHECK THE ERROR HANDLING TRY TO PASS INVALID CATALOG:
# default_catalog = "projects/{0}/locations/global/catalogs/invalid_catalog"
# .format(project_number)
# default_catalog = f"projects/{project_id}/locations/global/catalogs/invalid_catalog"
write_user_event_request = WriteUserEventRequest()
write_user_event_request.user_event = user_event
write_user_event_request.parent = default_catalog
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,7 @@
sys.path.append(".")
from noxfile_config import TEST_CONFIG_OVERRIDE
except ImportError as e:
print("No user noxfile_config found: detail: {}".format(e))
print(f"No user noxfile_config found: detail: {e}")
TEST_CONFIG_OVERRIDE = {}

# Update the TEST_CONFIG with the user supplied values.
Expand Down Expand Up @@ -222,9 +222,7 @@ def py(session: nox.sessions.Session) -> None:
if session.python in TESTED_VERSIONS:
_session_tests(session)
else:
session.skip(
"SKIPPED: {} tests are disabled for this sample.".format(session.python)
)
session.skip(f"SKIPPED: {session.python} tests are disabled for this sample.")


#
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ def add_fulfillment_places(product_name: str, timestamp, place_id):


create_product(product_id)
print("------add fulfilment places with current date: {}-----".format(current_date))
print(f"------add fulfilment places with current date: {current_date}-----")
add_fulfillment_places(product_name, current_date, "store2")
get_product(product_name)
delete_product(product_name)
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@
+ "/locations/global/catalogs/default_catalog/branches/default_branch"
)
product_id = "".join(random.sample(string.ascii_lowercase, 8))
product_name = "{}/products/{}".format(default_branch_name, product_id)
product_name = f"{default_branch_name}/products/{product_id}"


# generate product for create
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ def remove_fulfillment_places(product_name: str, timestamp, store_id):


create_product(product_id)
print("------remove fulfilment places with current date: {}-----".format(current_date))
print(f"------remove fulfilment places with current date: {current_date}-----")
remove_fulfillment_places(product_name, current_date, "store0")
get_product(product_name)
delete_product(product_name)
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,10 @@

project_id = google.auth.default()[1]
timestamp_ = datetime.datetime.now().timestamp().__round__()
bucket_name = "{}_products_{}".format(project_id, timestamp_)
bucket_name = f"{project_id}_products_{timestamp_}"

create_bucket(bucket_name)
upload_blob(bucket_name, "../resources/products.json")
upload_blob(bucket_name, "../resources/products_some_invalid.json")

print("\nThe gcs bucket {} was created".format(bucket_name))
print(f"\nThe gcs bucket {bucket_name} was created")
Original file line number Diff line number Diff line change
Expand Up @@ -101,16 +101,14 @@ def create_bucket(bucket_name: str):
print("Creating new bucket:" + bucket_name)
buckets_in_your_project = list_buckets()
if bucket_name in buckets_in_your_project:
print("Bucket {} already exists".format(bucket_name))
print(f"Bucket {bucket_name} already exists")
else:
storage_client = storage.Client()
bucket = storage_client.bucket(bucket_name)
bucket.storage_class = "STANDARD"
new_bucket = storage_client.create_bucket(bucket, location="us")
print(
"Created bucket {} in {} with storage class {}".format(
new_bucket.name, new_bucket.location, new_bucket.storage_class
)
f"Created bucket {new_bucket.name} in {new_bucket.location} with storage class {new_bucket.storage_class}"
)
return new_bucket

Expand All @@ -126,9 +124,9 @@ def delete_bucket(bucket_name: str):
blob.delete()
bucket = storage_client.get_bucket(bucket_name)
bucket.delete()
print("Bucket {} is deleted".format(bucket.name))
print(f"Bucket {bucket.name} is deleted")
else:
print("Bucket {} is not found".format(bucket_name))
print(f"Bucket {bucket_name} is not found")


def list_buckets():
Expand All @@ -145,18 +143,15 @@ def upload_blob(bucket_name, source_file_name):
"""Uploads a file to the bucket."""
# The path to your file to upload
# source_file_name = "local/path/to/file"
print("Uploading data from {} to the bucket {}".format(source_file_name,
bucket_name))
print(f"Uploading data from {source_file_name} to the bucket {bucket_name}")
storage_client = storage.Client()
bucket = storage_client.bucket(bucket_name)
object_name = re.search('resources/(.*?)$', source_file_name).group(1)
blob = bucket.blob(object_name)
blob.upload_from_filename(source_file_name)

print(
"File {} uploaded to {}.".format(
source_file_name, object_name
)
f"File {source_file_name} uploaded to {object_name}."
)


Expand Down Expand Up @@ -199,7 +194,7 @@ def delete_bq_table(dataset, table_name):
full_table_id = f"{project_id}.{dataset}.{table_name}"
bq = bigquery.Client()
bq.delete_table(full_table_id, not_found_ok=True)
print("Table '{}' is deleted.".format(full_table_id))
print(f"Table '{full_table_id}' is deleted.")


def upload_data_to_bq_table(dataset, table_name, source, schema_file_path):
Expand Down
Loading

0 comments on commit 81724cd

Please sign in to comment.