What's the best way to store an access token generated from a third-party API in AWS?
I'm currently running a script locally to generate reports with the Google Ad Manager API. Before running the script, I created a new service account key (JSON as the key type) together with ~/googleads.yaml.

Here's the dev guide.
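For context, the ~/googleads.yaml used by the client library for Ad Manager with a service account looks roughly like this (all values below are placeholders, not taken from my setup):

```yaml
ad_manager:
  application_name: MyReportingApp       # placeholder
  network_code: 12345678                 # placeholder
  path_to_private_key_file: /home/me/service-account-key.json  # local path to the JSON key
```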
However, I want to schedule this script on AWS Glue.

Here is the sample script. The issue I'm currently facing is: how do I call ad_manager.AdManagerClient.LoadFromStorage() from AWS? I've stored the credentials (the JSON key and the YAML config) in AWS Secrets Manager.
```python
from googleads import ad_manager, oauth2
import tempfile
import _locale

# Work around locale detection issues in the googleads library.
_locale._getdefaultlocale = (lambda *args: ['en_US', 'UTF-8'])

ad_unit_id = XXXXXXXXXX


def generate_ad_impressions(client):
    # Initialize appropriate service.
    report_service = client.GetService("ReportService", version="v202108")

    # Initialize a DataDownloader.
    report_downloader = client.GetDataDownloader(version="v202108")

    # Create statement object to filter for an order.
    statement = (
        ad_manager.StatementBuilder(version="v202108")
        .Where("PARENT_AD_UNIT_ID = :id")
        .WithBindVariable("id", ad_unit_id)
        .Limit(None)  # No limit/offset for reports
        .Offset(None)
    )

    report_job = {
        "reportQuery": {
            "dimensions": ["DATE", "HOUR"],
            "columns": [
                "AD_SERVER_IMPRESSIONS",
            ],
            "dateRangeType": "TODAY",
            "startDate": {"year": "2022", "month": "1", "day": "25"},
            "endDate": {"year": "2022", "month": "1", "day": "25"},
            "statement": statement.ToStatement(),
        }
    }

    try:
        # Run the report and wait for it to finish.
        report_job_id = report_downloader.WaitForReport(report_job)
    except Exception:
        print("Failed to generate report.")
        return

    # Change to your preferred export format.
    export_format = "CSV_DUMP"

    # report_file = tempfile.NamedTemporaryFile(suffix=".csv.gz", delete=False)
    with open('ad_unit_report.csv.gz', mode='wb') as report_file:
        # Download report data.
        report_downloader.DownloadReportToFile(report_job_id, export_format, report_file)

    print('success!')


if __name__ == '__main__':
    ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage('path_to_yaml_file')
    generate_ad_impressions(ad_manager_client)
```
The solution should be something like this:
```python
import boto3
from botocore.exceptions import ClientError
from googleads import ad_manager, oauth2
import tempfile
import _locale

# Work around locale detection issues in the googleads library.
_locale._getdefaultlocale = (lambda *args: ['en_US', 'UTF-8'])

ad_unit_id = XXXXXXXXXX


def get_secret():
    secret_name = "MySecret"
    region_name = "us-west-2"

    session = boto3.session.Session()
    client = session.client(
        service_name='secretsmanager',
        region_name=region_name,
    )

    try:
        get_secret_value_response = client.get_secret_value(
            SecretId=secret_name
        )
    except ClientError as e:
        if e.response['Error']['Code'] == 'ResourceNotFoundException':
            print("The requested secret " + secret_name + " was not found")
        elif e.response['Error']['Code'] == 'InvalidRequestException':
            print("The request was invalid due to:", e)
        elif e.response['Error']['Code'] == 'InvalidParameterException':
            print("The request had invalid params:", e)
        elif e.response['Error']['Code'] == 'DecryptionFailure':
            print("The requested secret can't be decrypted using the provided KMS key:", e)
        elif e.response['Error']['Code'] == 'InternalServiceError':
            print("An error occurred on service side:", e)
    else:
        # Secrets Manager decrypts the secret value using the associated KMS CMK.
        # Depending on whether the secret was a string or binary,
        # only one of these fields will be populated.
        if 'SecretString' in get_secret_value_response:
            text_secret_data = get_secret_value_response['SecretString']
            return text_secret_data
        else:
            binary_secret_data = get_secret_value_response['SecretBinary']
            return binary_secret_data


def generate_ad_impressions(client):
    # Initialize appropriate service.
    report_service = client.GetService("ReportService", version="v202108")

    # Initialize a DataDownloader.
    report_downloader = client.GetDataDownloader(version="v202108")

    # Create statement object to filter for an order.
    statement = (
        ad_manager.StatementBuilder(version="v202108")
        .Where("PARENT_AD_UNIT_ID = :id")
        .WithBindVariable("id", ad_unit_id)
        .Limit(None)  # No limit/offset for reports
        .Offset(None)
    )

    report_job = {
        "reportQuery": {
            "dimensions": ["DATE", "HOUR"],
            "columns": [
                "AD_SERVER_IMPRESSIONS",
            ],
            "dateRangeType": "TODAY",
            "startDate": {"year": "2022", "month": "1", "day": "25"},
            "endDate": {"year": "2022", "month": "1", "day": "25"},
            "statement": statement.ToStatement(),
        }
    }

    try:
        # Run the report and wait for it to finish.
        report_job_id = report_downloader.WaitForReport(report_job)
    except Exception:
        print("Failed to generate report.")
        return

    # Change to your preferred export format.
    export_format = "CSV_DUMP"

    # report_file = tempfile.NamedTemporaryFile(suffix=".csv.gz", delete=False)
    with open('ad_unit_report.csv.gz', mode='wb') as report_file:
        # Download report data.
        report_downloader.DownloadReportToFile(report_job_id, export_format, report_file)

    print('success!')


if __name__ == '__main__':
    ad_manager_client = ad_manager.AdManagerClient.LoadFromString(get_secret())
    generate_ad_impressions(ad_manager_client)
```

Here, the function get_secret fetches the YAML string from AWS Secrets Manager, stored as MySecret in region us-west-2. That YAML string is then used to create the Google Ad Manager client via LoadFromString.
Note that I have not tested this, as I do not have a Google API key and ID.
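If loading the whole YAML from Secrets Manager keeps causing trouble, another option is to store only the service account JSON key as the secret, write it to a temporary file at runtime, and build the client without googleads.yaml at all. This is an untested sketch; the secret name, region, application name, and network code below are placeholders rather than values from the question:

```python
import tempfile

import boto3
from googleads import ad_manager, oauth2


def build_ad_manager_client():
    # Fetch the service account JSON key (placeholder secret name and region).
    secrets = boto3.client("secretsmanager", region_name="us-west-2")
    key_json = secrets.get_secret_value(SecretId="MyServiceAccountKey")["SecretString"]

    # The googleads OAuth2 helper expects a path to the key file,
    # so write the key to a temporary file first.
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as key_file:
        key_file.write(key_json)

    # Build the OAuth2 client and the Ad Manager client directly.
    oauth2_client = oauth2.GoogleServiceAccountClient(
        key_file.name, oauth2.GetAPIScope("ad_manager")
    )
    return ad_manager.AdManagerClient(
        oauth2_client,
        "MyReportingApp",          # placeholder application name
        network_code="12345678",   # placeholder network code
    )
```

With this in place the entry point could become `generate_ad_impressions(build_ad_manager_client())`, which avoids the path_to_private_key_file entry in googleads.yaml entirely.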
Thanks for sharing the input. It is useful, but I got an error message after I ran the script. The relevant part of the log: `data = self.stream.read(size)` followed by `AttributeError: 'NoneType' object has no attribute 'read'`.
Can you share the full stack trace? I'd like to know which line the error comes from.
I decided to go with Parameter Store, similar to the solution suggested above. But the YAML file needs the full path to the private key file. For testing purposes I stored the key file in S3 and provided that path, but the error logs say the file does not exist. Thanks for your help!
Looks like a permissions issue. Did you check the IAM policy?
It works like a charm. Thanks!
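For anyone hitting the same "file does not exist" error: the path_to_private_key_file entry in googleads.yaml is read as a local file path, so pointing it at an S3 location generally will not work on its own. One option is to copy the key down to local disk first (e.g. /tmp on a Glue worker) and point the YAML at that local path. A sketch with placeholder bucket and object names, assuming the job role is allowed s3:GetObject on the key object:

```python
import boto3

# Placeholders: replace with your own bucket, object key, and local path.
KEY_BUCKET = "my-config-bucket"
KEY_OBJECT = "googleads/service-account-key.json"
LOCAL_KEY_PATH = "/tmp/service-account-key.json"


def fetch_private_key_from_s3():
    """Copy the service account key from S3 to local disk so that
    path_to_private_key_file in googleads.yaml can reference it."""
    s3 = boto3.client("s3")
    s3.download_file(KEY_BUCKET, KEY_OBJECT, LOCAL_KEY_PATH)
    return LOCAL_KEY_PATH
```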