I needed to backup some sftp sites and mysql from a remote server to my local server at home. Piece of cake in Python.
After that I added it as a Jenkins script to schedule periodic backups:
import logging
import os
import shutil

import paramiko
import pysftp

logging.basicConfig(level=logging.DEBUG)


def sftp_backup(ssh_host=None, ssh_username=None, ssh_password=None,
                source_directory=None, local_directory=None):
    """Recursively download *source_directory* from an SFTP server.

    Any previous copy at *local_directory* is deleted first so that files
    removed on the server do not linger in the backup.

    :param ssh_host: SFTP server hostname.
    :param ssh_username: SFTP login name.
    :param ssh_password: SFTP password.
    :param source_directory: remote directory to back up.
    :param local_directory: local destination directory (recreated empty).
    """
    with pysftp.Connection(ssh_host, username=ssh_username,
                           password=ssh_password, log=True) as sftp:
        sftp.chdir(source_directory)
        # First remove the local directory to make room for a fresh copy.
        try:
            logging.info('Removing local directory: %s', local_directory)
            shutil.rmtree(local_directory)
            logging.info('Done removing local directory: %s', local_directory)
        except FileNotFoundError:
            # Nothing to delete on the very first run.
            logging.info("Can't delete %s. Probably does not exist",
                         local_directory)
        # Then create the (now guaranteed empty) target directory.
        if not os.path.exists(local_directory):
            logging.info('Creating empty local directory: %s', local_directory)
            os.makedirs(local_directory)
            logging.info('Done creating local directory: %s', local_directory)
        # Recursively copy the remote tree into local_directory.
        logging.info('Starting to download from %s to %s',
                     source_directory, local_directory)
        sftp.get_r('', local_directory)
        logging.info('Done')


def dump_mysql_to_file_via_ssh(ssh_host=None, ssh_user=None, ssh_password=None,
                               mysql_host=None, mysql_user=None,
                               mysql_password=None, mysql_databases=None,
                               output_file='dump.sql'):
    """Run ``mysqldump`` on a remote host over SSH, streaming to a file.

    The dump is written to *output_file* as raw bytes so the resulting
    file can be imported directly with ``mysql ... < dump.sql``.

    :param ssh_host: host to SSH into (where mysqldump runs).
    :param ssh_user: SSH login name.
    :param ssh_password: SSH password.
    :param mysql_host: MySQL server host as seen from the SSH host.
    :param mysql_user: MySQL user.
    :param mysql_password: MySQL password.
    :param mysql_databases: space-separated database names to dump.
    :param output_file: local path for the SQL dump.
    """
    logging.debug('dump_mysql_to_file_via_ssh')
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    ssh.connect(hostname=ssh_host, username=ssh_user, password=ssh_password)
    try:
        # NOTE(review): the command is built by string interpolation, so the
        # credentials/database names must be trusted (they come from our own
        # config here, not from external input).
        stdin, stdout, stderr = ssh.exec_command(
            'mysqldump --host={mysql_host} -u {mysql_user} '
            '-p{mysql_password} --databases {mysql_databases}'.format(
                mysql_host=mysql_host,
                mysql_user=mysql_user,
                mysql_password=mysql_password,
                mysql_databases=mysql_databases,
            ))
        logging.info('Begin writing to file %s', output_file)
        # Stream the raw bytes straight into the file.  (Writing str(chunk)
        # would wrap every chunk in "b'...'" and turn newlines into literal
        # \n, producing a dump MySQL cannot re-import.)  copyfileobj also
        # reads until EOF, so no trailing buffered data is lost the way a
        # loop guarded by exit_status_ready() could lose it.
        with open(output_file, 'wb') as dump_file:
            shutil.copyfileobj(stdout, dump_file)
        exit_status = stdout.channel.recv_exit_status()
        if exit_status != 0:
            logging.error('mysqldump exited with status %s: %s', exit_status,
                          stderr.read().decode('utf-8', errors='replace'))
        logging.info('Done writing to file.')
    finally:
        # Always release the SSH connection, even if the dump failed.
        ssh.close()


if __name__ == '__main__':
    dump_mysql_to_file_via_ssh(
        ssh_host='ssh.example.com',
        ssh_user='',
        ssh_password='',
        mysql_host='',
        mysql_user='',
        mysql_password='',
        mysql_databases='',
        output_file='sebastiannilsson.com.sql'
    )
    sftp_backup(ssh_host='ssh.example.com',
                ssh_username='',
                ssh_password='',
                source_directory='',
                local_directory='')
Thanks, it is awesome, but it generates the file with literal \n sequences, which causes an error while importing.
the dump has unnecessary lines such as: "b"-- MySQL dump 10.13 Distrib 5.7.14, for Linux (x86_64)\n--\n-- Host: drvl421 Database: scatalogprod\n-- ------------------------------------------------------\n-- Server version\t5.7.17-enterprise-commercial-advanced\n\n/" which return syntax error.
Also, when importing get the following error: PAGER set to stdout
It works for me. Where are you trying to import it? It is the MySQL dump function so it really should work.
I have fixed my problem by changing the following:
1. file = open(output_file, 'wb')
2. file.write(r)
Great stuff! Thanks!
mysql -uroot -p -D scatalogprod < python-scatalogprod-20-Apr-2021.sql
@sebnill I use the above command to import to my db.