Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- class CloneDatabaseService
- attr_accessor :equal
- attr_accessor :all_records
def self.default_models_for_sinchronization
  # Models that must always be part of a synchronization payload, appended
  # after the model walk. Returns the accumulated @all_records array.
  %w[CloneDatabase AttachedFile].each do |model_name|
    @all_records.push(model_name.constantize)
  end
  @all_records
end
def self.validate_table_withoud_records
  # Look at the most recent CloneDatabase row and either open a new sync
  # window (intranet side) or stamp the extranet check time (extranet side).
  clone_database = CloneDatabase.last
  unless clone_database.nil?
    start = clone_database.start
    check_extranet = clone_database.check_extranet
    finish = clone_database.finish
  end
  if Rails.application.config.intranet_site
    if clone_database.nil?
      # First-ever sync: seed with a fixed epoch so every record is included.
      CloneDatabase.create(
        start: '2012-10-26 13:29:52',
        check_extranet: nil, finish: nil,
        migration_version: ActiveRecord::Migrator.current_version.to_s
      )
    elsif !start.nil? && !check_extranet.nil? && !finish.nil?
      # Previous cycle fully completed: open a fresh window starting now.
      CloneDatabase.create(
        start: Time.now,
        check_extranet: nil,
        finish: nil,
        migration_version: ActiveRecord::Migrator.current_version.to_s
      )
    end
  elsif !clone_database.nil? && !start.nil? && check_extranet.nil? && finish.nil?
    # Extranet side acknowledges the open window exactly once.
    clone_database.check_extranet = Time.now
    clone_database.save
  end
end
def self.is_restore_database(params)
  # Decides whether this request is a full database restore. When
  # params[:model] arrives as a JSON string we are in a restore download:
  # set @equal, parse the payload and strip tables that must never be
  # shipped to the client. Otherwise just clear the flag.
  #
  # Side effects: sets the class-level @equal flag and replaces
  # params[:model] (String -> parsed Hash) in the restore case.
  return @equal = false unless params[:model].is_a?(String)

  @equal = true
  # Cache/report/internal tables excluded from any restoration download.
  excluded_tables = %w[
    SinergiaCache5252Report
    SinergiaCacheResultReport
    SinergiaParam
    SinergiaValue
    CacheSinergiaValue
    CkeditorAsset
    SchemaMigration
    KeyRestoreDatabase
  ]
  parse_model = JSON.parse(params[:model])
  excluded_tables.each { |table| parse_model.delete(table) }
  params[:model] = parse_model
end
def self.validate_auto_reference_association_belongs_to(association_belongs_to)
  # Removes the first self-referential belongs_to association (:parent or
  # :friend) so the recursive walk in populate_model_collection cannot loop
  # forever. Mutates and returns the given array.
  #
  # Fix: the original assigned the array to a "copy" variable that was only
  # an alias, then deleted from the collection while iterating it — safe
  # only by accident thanks to the `break`. Iterate over a real copy.
  association_belongs_to.dup.each do |association|
    if association.name == :parent || association.name == :friend
      association_belongs_to.delete(association)
      break
    end
  end
  association_belongs_to
end
def self.validate_auto_reference_association_has_many(association_has_many)
  # Removes the first self-referential has_many association (:parent or
  # :friend), mirroring validate_auto_reference_association_belongs_to.
  # Mutates and returns the given array.
  #
  # Fix: the original "copy" variable was an alias of the same array, so the
  # code deleted from the collection it was iterating. Iterate over a dup.
  association_has_many.dup.each do |association|
    if association.name == :parent || association.name == :friend
      association_has_many.delete(association)
      break
    end
  end
  association_has_many
end
def self.populate_model_collection(model)
  # Depth-first walk of `model` and everything it references, accumulating
  # model classes into @all_records so that belongs_to parents land before
  # the model itself, and has_many children after it.
  association_belongs_to = validate_auto_reference_association_belongs_to(
    model.reflect_on_all_associations(:belongs_to)
  )
  association_has_many = validate_auto_reference_association_has_many(
    model.reflect_on_all_associations(:has_many)
  )
  association_belongs_to.each do |association|
    # Fix: resolve the SAME singularized constant used by the membership
    # guard — the original recursed on the non-singularized name, so the
    # guard and the recursion could resolve different constants and
    # re-visit (or loop on) a model.
    referenced = association.name.to_s.singularize.camelize.constantize
    populate_model_collection(referenced) unless @all_records.include?(referenced)
  end
  model_constant = model.name.to_s.singularize.camelize.constantize
  @all_records << model_constant unless @all_records.include?(model_constant)
  association_has_many.each do |association|
    referenced = association.name.to_s.singularize.camelize.constantize
    populate_model_collection(referenced) unless @all_records.include?(referenced)
  end
end
def self.build_collection_of_models(models_for_mapping)
  # Reset the shared accumulator, then walk every requested model name
  # (strings), letting populate_model_collection fill @all_records.
  @all_records = []
  models_for_mapping.each do |model_name|
    populate_model_collection(model_name.constantize)
  end
end
def self.find_vinculate_record(attached_file)
  # Dumps the binary payload of a stored upload as Base64 into
  # uploads/cache/<id>.json and returns a { name: <id> } descriptor.
  cache_path = "#{Rails.root.join('uploads/cache')}/#{attached_file.attached_file_id}.json"
  encoded = Base64.strict_encode64(
    File.read(Rails.root.join("uploads/store/#{attached_file.attached_file_id}").to_s)
  )
  # Block form guarantees the handle is closed even if the write raises;
  # the original used File.new and leaked the descriptor on error.
  File.open(cache_path, 'w+') { |file| file.puts encoded }
  { name: attached_file.attached_file_id }
end
def self.check_if_exists_attached_file(model, files)
  # Appends a descriptor for every stored attachment that belongs to `model`
  # and still has both a live database row and a file on disk. Returns the
  # (mutated) files array.
  AttachedFile.all.each do |attached_file|
    next unless attached_file.model_type == model.to_s
    next unless model.exists?(attached_file.model_id)
    next unless File.exist?(Rails.root.join("uploads/store/#{attached_file.attached_file_id}").to_s)
    files << find_vinculate_record(attached_file)
  end
  files
end
def self.build_object(model, download)
  # Serializes either the whole table (@equal => full restore) or only rows
  # updated since the last sync window opened, honoring the intranet_only
  # column on the intranet side. Writes uploads/cache/<model>.json and
  # returns a { name: <model> } descriptor.
  records =
    if @equal
      if model.column_names.include?('intranet_only') && Rails.application.config.intranet_site
        model.all.where('intranet_only = ?', false)
      else
        model.all
      end
    elsif Rails.application.config.intranet_site && model.column_names.include?('intranet_only')
      model.all.where('updated_at > ? AND intranet_only = ?', download.start, false)
    else
      model.all.where('updated_at > ?', download.start)
    end
  create_file(model.to_s, records)
  { name: model.to_s }
end
def self.build_files(all_files, records)
  # For each referenced upload id that still exists on disk, writes its
  # Base64 payload to uploads/cache/<id>.json and appends a { name: id }
  # descriptor. Returns the (mutated) records array.
  all_files.each do |file_id|
    store_path = Rails.root.join("uploads/store/#{file_id}").to_s
    next unless File.exist?(store_path)
    encoded = Base64.strict_encode64(File.read(store_path))
    create_file(file_id, encoded)
    records << { name: file_id }
  end
  records
end
def self.find_records(model, download)
  # Full-restore downloads take every row; incremental downloads take only
  # the rows touched after the previous sync window opened.
  return model.all if @equal
  model.all.where('updated_at > ?', download.start)
end
def self.get_files(model, records, download)
  # Collects the title-image ids referenced by the records in this sync
  # window and appends their file descriptors to `records`.
  matching = find_records(model, download)
  return records if matching.count.zero?
  image_ids = matching.map(&:title_image_id).compact
  build_files(image_ids, records)
end
def self.create_file(name, files)
  # Writes `files` to uploads/cache/<name>.json and returns `name`.
  # A String payload is written verbatim; anything else is streamed out as
  # a JSON array, fsync-ing every `max_regs_fsync` records to bound dirty
  # page buildup while serializing large tables.
  max_regs_fsync = 1000
  path = "#{Rails.root.join('uploads/cache')}/#{name}.json"
  # Block form closes the handle even when serialization raises; the
  # original used File.new and leaked the descriptor on error.
  File.open(path, 'w+') do |file_write|
    if files.is_a?(String)
      file_write.print files
      # flush to disk
      file_write.fsync
    else
      file_write.print '['
      count = 0
      files.each do |record|
        file_write.print ',' if count > 0
        # TODO: optimize memory use (previously measured: 3878699 bytes)
        file_write.print record.to_json
        # flush to disk periodically
        file_write.fsync if count > 0 && count % max_regs_fsync == 0
        count += 1
      end
      file_write.print ']'
    end
  end
  name
end
def self.build_videos(all_videos, records)
  # Same as build_files, but for video payloads: Base64-encodes each stored
  # video into uploads/cache/<id>.json and appends a { name: id } descriptor.
  # Returns the (mutated) records array.
  #
  # Fix: the original called `file.close` on the Base64 STRING returned by
  # strict_encode64 — String has no #close, so every run with an existing
  # video raised NoMethodError. Nothing needs closing: File.read manages
  # its own handle.
  all_videos.each do |video|
    store_path = Rails.root.join("uploads/store/#{video}").to_s
    next unless File.exist?(store_path)
    encoded = Base64.strict_encode64(File.read(store_path))
    create_file(video, encoded)
    records << { name: video }
  end
  records
end
def self.get_videos(model, records, download)
  # Collects the video-file ids referenced by the records in this sync
  # window and appends their file descriptors to `records`.
  matching = find_records(model, download)
  return records if matching.count.zero?
  video_ids = matching.map(&:video_file_id).compact
  build_videos(video_ids, records)
end
def self.all_objects(params)
  # Entry point for a sync download. Decides full-restore vs incremental
  # (via @equal), (re)builds the ordered model collection, then serializes
  # every model — plus its attachments and, for Video, its media files —
  # into uploads/cache/*.json. Returns the list of { name: ... } descriptors
  # later consumed by build_zip.
  validate_table_withoud_records
  is_restore_database(params)
  build_collection_of_models(params[:model])
  files = []
  download = CloneDatabase.last
  unless download.nil?
    default_models_for_sinchronization
    @all_records.each do |model|
      files = check_if_exists_attached_file(model, files)
      if model.column_names.include?('title_image_id')
        files << build_object(model, download)
        files = get_files(model, files, download)
      elsif model.name == 'Video'
        files << build_object(model, download)
        files = get_videos(model, files, download)
      else
        files << build_object(model, download)
      end
    end
  end
  # compact! mutates in place (and returns nil when nothing changed),
  # so the trailing `files` is the actual return value.
  files.compact!
  files
end
def self.remove_ghost_columns_when_convert_to_json(parse_hash, model)
  # Hashes round-tripped through to_json can carry keys that are not real
  # columns ("ghost" keys); keep only the model's column names so a later
  # update never references an unknown column. Missing columns stay present
  # with a nil value, exactly as before.
  column_names = model.constantize.column_names
  # TODO: optimize memory use (previously measured: 6440688 bytes)
  parse_hash.map do |row|
    column_names.each_with_object({}) do |column, slim_row|
      slim_row[column] = row[column]
    end
  end
end
def self.remove_files(zip_information)
  # Deletes every cached JSON file produced for this zip.
  cache_dir = Rails.root.join('uploads/cache').to_s
  zip_information.each do |entry|
    File.delete("#{cache_dir}/#{entry[:name]}.json")
  end
end
def self.build_zip(zip_information, database_tables)
  # Packs every cached JSON (one entry per descriptor in zip_information)
  # into a temporary zip. Entries whose name is in `database_tables` are
  # re-parsed and stripped of ghost columns before being written; all other
  # entries are copied through verbatim. Returns the open Tempfile — the
  # caller is responsible for its lifetime (cleanup here is deliberately
  # commented out, see the XXX notes below).
  zip_tempfile = Tempfile.new(['', '.zip'])
  begin
    Zip::OutputStream.open(zip_tempfile.path) do |zip|
      zip_information.each do |file|
        zip.put_next_entry("#{file[:name]}.json")
        array = database_tables
        # TODO: look into a streaming/incremental read instead of gets
        # TODO: optimize memory use; make sure this File.open gets closed
        File.open("#{Rails.root.join('uploads/cache').to_s}/#{file[:name]}.json", 'r') do |file_read|
          if array.include?(file[:name])
            # TODO: optimize memory use (previously measured: 3275653 bytes)
            # NOTE(review): gets reads only up to the first newline — this
            # assumes create_file wrote the JSON as a single line.
            string_file = file_read.gets
            parse_for_hash = JSON.parse(string_file)
            # This step is necessary because converting a hash to JSON with
            # to_json can generate nonexistent "ghost" keys, which are then
            # not found in the database during the update.
            new_hash = remove_ghost_columns_when_convert_to_json(parse_for_hash, file[:name])
            zip.print new_hash.to_json
          else
            zip.print file_read.gets
          end
          # NOTE(review): redundant — the File.open block closes the handle;
          # closing twice is a harmless no-op.
          file_read.close
        end
      end
      # XXX: closes the zip stream
      zip.close
    end
    # XXX: no longer needed
    # zip_data = File.read(zip_tempfile.path)
  ensure
    # XXX: must NOT delete the temp file here — the caller still needs it
    # zip_tempfile.close
    # delete the generated JSON files
    # remove_files(zip_information)
  end
  # XXX: returns the temporary zip file
  # zip_data
  return zip_tempfile
end
- end
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement