| # == Schema Information |
| # Schema version: 1 |
| # |
| # Table name: sites |
| # |
| # id :integer(11) not null, primary key |
| # title :string(40) default(), not null |
| # description :text |
| # site_type :string(1) |
| # baseline_id :integer(10) |
| # site_id_baseline_process :integer(10) |
| # created_on :datetime |
| # updated_on :datetime |
| # html_files_count :integer(11) |
| # wikifiable_files_count :integer(11) |
| # user_id :integer(11) |
| # content_scanned_on :datetime |
| # folder :string(200) default(), not null |
| # |
| |
| # A Site can be a published website from EPF or a Wiki, which is an enhanced |
| # published website from EPF. The published websites from EPF that are not |
| # enhanced are also referred to as baseline processes. Baseline processes are |
| # used to create or update Wiki sites. |
| # |
| # Creation or update of a Wiki is a two step process for performance reasons. |
| # This way the second step can be performed using a job that runs at night. |
| # |
| # Therefore, #new_wiki is the first step of creation of a new Wiki. #wikify |
| # does the actual 'wikifying'. To update a Wiki the first step is to set |
| # the baseline_process attribute. The actual update is done by #update_wiki |
| # |
| # More information: |
| # * {EPF Wiki Data model}[link:files/doc/DATAMODEL.html] |
| #--###################################################################### |
| # Copyright (c) 2006 LogicaCMG |
| # |
| # All rights reserved. This program and the accompanying materials |
| # are made available under the terms of the Eclipse Public License v1.0 |
| # which accompanies this distribution, and is available at |
| # http://www.eclipse.org/legal/epl-v10.html |
| # |
| # Contributors: |
| # |
| # Onno van der Straaten:: initial implementation |
| #++###################################################################### |
| # {Copyright (c) 2006 LogicaCMG}[link:files/COPYRIGHT.html] |
| |
| class Site < ActiveRecord::Base |
| |
| has_many :comments |
| has_many :versions |
| has_many :checkouts |
| has_many :difference_analyses |
| belongs_to :baseline |
| belongs_to :user |
| has_and_belongs_to_many :pages |
| has_and_belongs_to_many :baselines |
| |
| # TODO remove already defined above |
| # has_many :difference_analyses, :class_name => "DifferenceAnalysis", :foreign_key => "site_id" |
| |
| has_many :used_in_difference_analyses, :class_name => 'DifferenceAnalysi', :foreign_key => 'site_id_from' |
| |
| # Baseline process to create or update a Wiki |
| belongs_to :baseline_process, :class_name => "Site", :foreign_key => "site_id_baseline_process" |
| |
| validates_presence_of :user_id, :title, :folder |
| validates_format_of :site_type, :with => /W|S/ |
| validates_format_of :folder, :message => 'should consist of letters, digits and underscores', :with => /^([0-9A-Za-z_-])([0-9A-Za-z_-])*[0-9A-Za-z_-]$/ |
| |
| # Not baseline_id because id is only present after save for 'static sites' |
| # See also #validate_on_create |
| validates_presence_of :baseline |
| |
| # required when using #new_baseline_process |
| attr_accessor :baseline_baseline, :baseline_description |
| |
| # during creation of a new static Site this stores the zip-file that contains the content |
| attr_accessor :file |
| |
| # We don't want this view logic here but because we cannot use url_for in emails, |
| # this is the workaround |
| attr_accessor :url |
| |
| # this style element is added to default.css of every wiki, see #wikify and #update_wiki |
| # NOTE: the text '!important' is for cross browser support, the lines with !important are |
| # read by FireFox, the other lines are read by IE |
| TOOLBAR_STYLE = ['#toolbar {position:absolute;', |
| 'right: 110px!important;', 'right:50px', 'top: 11px;', |
| 'width: 200px;', 'height: 10px;','z-index: 100','}'].join("\n") |
| |
| # CSS-file to add toolbar style too, see #enhance_files |
| DEFAULT_CSS = 'css/default.css' |
| |
| # Modified treebrowser Javascript lib to use in each site, see #enhance_files |
| TREEBROWSER_JS = 'public/javascripts/treebrowser.js' |
| |
| # A wikifiable file is a HTML file |
| HTML_FILE_PATTERN = /.*.htm(l)?/i |
| |
| # A wikifiable is not a Wiki file (a version file created using the Wiki) |
| WIKI_FILE_PATTERN = /(.)*wiki(.)*/i |
| |
| # HTML Editors remove the nowrap attribute, so we add it to the CSS-file |
| CSS_PAGETITLE_PATTERN = /.pageTitle(.)*\{(.)*\}/im |
| CSS_PAGETITLE = ['.pageTitle', '{', |
| 'background: #9999cc;', |
| 'color: #ffffff;', |
| 'font-size: 12pt;', |
| 'font-weight: bold;', |
| 'padding-bottom: 5px;', |
| 'padding-left: 10px;', |
| 'padding-right: 10px;', |
| 'padding-top: 5px;', |
| 'text-align: left;', |
| 'white-space: nowrap;','}'].join("\n") |
| |
| # Method #new_upload to process a upload of content for a baseline process. |
| # This does not create a baseline process, for this #new_baseline_process is used. |
| # TODO directly after upload, create new site, causes error, very strange |
| def self.new_upload(params = nil) |
| site = Site.new(params) |
| site.errors.add(:folder, 'can\'t be blank') if site.folder.blank? |
| site.errors.add(:file, 'can\'t be blank') if site.file.original_filename.blank? |
| |
| # FIXME can't convert nil to string |
| site.errors.add(:folder, 'already exists') if File.exists?(site.path) && !site.folder.blank? |
| if site.errors.empty? |
| site.folder = site.folder |
| logger.debug("Writing upload zip to #{site.path2zip}") |
| File.open(site.path2zip, "wb") { |f| f.write(site.file.read) } |
| site.unzip_upload |
| end |
| return site |
| end |
| |
| # Method #new_baseline_process to create a new Site. This will also define a Baseline |
| # NOTE: the content is not scanned yet, see #scan4content |
| def self.new_baseline_process(params = nil) |
| site = Site.new(params) |
| site.site_type = 'S' |
| if !site.folder.nil? && File.exists?(site.path) |
| site.html_files_count = Site.files_html(site.path).size |
| wikifiable_files = site.files_wikifiable |
| site.wikifiable_files_count = wikifiable_files.size |
| #TODO the following causes an error when there are no files |
| site.baseline = Baseline.new(:buildid => buildid(File.ctime(wikifiable_files[0])), :baseline => site.baseline_baseline, :description => site.baseline_description ) |
| end |
| return site |
| end |
| |
| def self.folders_with_unused_baseline_processes |
| sites_path = "#{ENV['EPFWIKI_ROOT_DIR']}public/#{ENV['EPFWIKI_SITES_FOLDER']}" |
| File.makedirs(sites_path) |
| entries = Dir.entries(sites_path) - ['.', '..', 'compare', '.svn'] |
| folders = entries.collect {|entry| entry if File.ftype(File.expand_path(entry, sites_path)) == 'directory'} |
| sites = Site.find_baseline_processes |
| usedFolders = sites.collect {|aSite| aSite.path.gsub("#{ENV['EPFWIKI_ROOT_DIR']}public/#{ENV['EPFWIKI_SITES_FOLDER']}/",'')} |
| return folders.compact - usedFolders |
| end |
| |
  # Method #scan4content
  # 1. scans the Site folder for pages that can be wikified and
  # 2. associates each Page with the Site using Page.find_or_new and
  # 3. if the site is a baseline process, completes the definition of its
  #    Baseline by associating the same Page records with it as well.
  # Raises if called on a Wiki site; stamps content_scanned_on and saves
  # the record (save!) when done.
  def scan4content
    logger.info("Scanning content in site #{self.title}")
    raise 'Scanning content is only supported for baseline_processes' if self.wiki?
    files = self.files_wikifiable
    files.each do |aFile|
      # rel_path of a page is its file path relative to the site folder
      page = Page.find_or_new({:rel_path => aFile.gsub(self.path + '/', '')}, self)
      self.pages << page if !self.pages.include?(page)
    end
    self.content_scanned_on = Time.now
    if self.baseline_process?
      # only populate the baseline's pages once
      if self.baseline.pages_count == 0
        # TODO: we have to split up this transaction
        # because it causes MySQL to crash when there are 6000 or more records?
        # Maybe this will work with InnoDB? MyISAM has this problem reported
        self.baseline.pages = self.pages
      end
    end
    self.save!
  end
| |
| def content_scanned? |
| return !self.content_scanned_on.nil? |
| end |
| |
| # Method #new_wiki is first step of creation of a new Wiki based |
| # on a baseline process. Method #wikify |
| # For performance reasons this is implemented as a two step process: this creates |
| # a 'pending' Wiki site, #wikify creates the the wikified content |
| def self.new_wiki(params = nil) |
| wiki = Site.new(params) |
| logger.info("Creating wiki " + wiki.title) |
| wiki.site_type = "W" |
| if !wiki.baseline_process.nil? |
| wiki.baseline = wiki.baseline_process.baseline |
| wiki.wikifiable_files_count = wiki.baseline_process.wikifiable_files_count |
| wiki.html_files_count = wiki.baseline_process.html_files_count |
| end |
| return wiki |
| end |
| |
  # Method #wikify does the actual wikifying of the content. It is the second
  # step of the two step process, the first step was performed using #new_wiki.
  # * copies content of the baseline process (source) into this site's folder
  # * scans the content of the baseline process if this was not done yet, see #scan4content
  # * enhances the files in that site using method #enhance_files
  # * creates a relation between the Baseline and the Site
  # Raises unless this site is a pending ('P') Wiki whose baseline process is static.
  # NOTE(review): the record is not saved here — presumably the caller saves; confirm.
  def wikify
    logger.info("Wikifying " + self.title)
    raise 'The site is not a pending wiki site!' if self.status != 'P'
    #raise "A baseline process cannot be wikified" if self.status != 'P'
    #raise "No baseline process specified for this site" if self.baseline_process.nil?
    raise "Can only update with a baseline process (static site)" if self.baseline_process.wiki?
    File.makedirs(self.path)
    logger.info("Copying files from " + self.baseline_process.path + " to " + self.path )
    FileUtils.cp_r(self.baseline_process.path + "/.", self.path) # trailing '/.' copies the folder's contents, not the folder itself [http://www.ruby-doc.org/core/classes/FileUtils.html#M001703]

    self.baseline_process.scan4content if !self.baseline_process.content_scanned_on
    self.pages << self.baseline_process.pages
    # NOTE: the parent.path already has a trailing slash
    enhance_files
    self.baselines << self.baseline_process.baseline unless self.baselines.include?(self.baseline_process.baseline)
    # clearing baseline_process flips #status from 'P' to 'W'
    self.baseline_process = nil
  end
| |
  # Updates this Wiki with the content of the baseline process set on it.
  # Content of the baseline process is scanned (#scan4content) if this wasn't
  # done yet. Raises when pages are checked out or no baseline process is set.
  # TODO add checks that the update site is static and the site to update is a wiki
  # NOTE(review): the record is not saved here — presumably the caller saves; confirm.
  def update_wiki
    logger.info("Starting update of wiki #{title}")
    if self.checkouts.length > 0
      logger.error("Could not update site #{title} because of checkouts")
      raise "Could not update #{title} due to checkouts"
    elsif !baseline_process
      logger.error('No baseline specified')
      raise "No baseline specified for update of #{title}"
    else
      logger.info("Copy update site " + baseline_process.path + " to " + self.path)
      # overwrite-copy the baseline process content into this wiki's folder
      self.baseline_process.copy_to(self, nil)
      self.baseline_process.scan4content if self.baseline_process.pages.size == 0
      baseline_process_pages = self.baseline_process.pages # NOTE(review): unused local
      logger.debug("Wiki has no pages") if self.pages.size == 0
      logger.debug("Baseline process has no pages") if self.baseline_process.pages.size == 0
      self.pages = self.baseline_process.pages | self.pages # assumes that both are not nil
      logger.info("Enhancing site " + self.baseline_process.path + " to " + self.path)
      enhance_files
      self.baseline = self.baseline_process.baseline
      # clearing baseline_process flips #status from 'U' back to 'W'
      self.baseline_process = nil
      self.baselines << self.baseline unless self.baselines.include?(self.baseline)
    end
  end
| |
| def baseline_processes_candidate |
| returning bp_candidate = [] do |
| if self.status == 'W' |
| Site.find_baseline_processes.each do |bp| |
| bp_candidate << bp unless self.baselines.include?(bp.baseline) |
| end |
| end |
| end |
| end |
| |
| def self.find_wikis_pending |
| returning wikis_pending = [] do |
| Site.find_wikis.each do |wiki| |
| wikis_pending << wiki if wiki.status == 'P' |
| end |
| end |
| end |
| |
| def self.find_wikis |
| Site.find_all_by_site_type('W') |
| end |
| |
| def self.find_wikis_update |
| returning wikis_update = [] do |
| Site.find_wikis.each do |wiki| |
| wikis_update << wiki if wiki.status == 'U' |
| end |
| end |
| end |
| |
| def self.find_baseline_processes |
| Site.find_all_by_site_type('S') |
| end |
| |
| # return collection of Site-records that need to scanned |
| def self.find_baseline_processes_2scan |
| returning bp_2scan = [] do |
| Site.find_baseline_processes.each do |bp| |
| bp_2scan << bp if bp.content_scanned_on.blank? |
| end |
| end |
| return Site.find(:all, :conditions => ['site_type="S" and content_scanned_on is null'], :order => "title ASC") |
| end |
| |
| def wiki? |
| return self.site_type == 'W' |
| end |
| |
| def baseline_process? |
| return self.site_type == 'S' |
| end |
| |
| def path |
| return "#{ENV['EPFWIKI_ROOT_DIR']}public/#{ENV['EPFWIKI_WIKIS_FOLDER']}/#{self.folder}" if self.wiki? |
| return "#{ENV['EPFWIKI_ROOT_DIR']}public/#{ENV['EPFWIKI_SITES_FOLDER']}/#{self.folder}" |
| end |
| |
| def rel_path |
| return path.gsub(ENV['EPFWIKI_ROOT_DIR'] + 'public/','') |
| end |
| |
| |
| def path2zip |
| return self.path + '.zip' |
| end |
| |
| def status |
| return 'S' if self.baseline_process? |
| return 'P' if self.pages.size == 0 && !self.baseline_process.nil? |
| return 'U' if self.pages.size > 0 && !self.baseline_process.nil? |
| return 'W' |
| end |
| |
  # #templates returns a collection of Page records.
  # These Page records are templates for creating new pages.
  # These templates are stored in #templates_dir; template files missing there
  # are first copied from ENV['EPFWIKI_TEMPLATES_DIR'] and enhanced. A current
  # Version is looked up (or created) and saved for every template.
  def templates
    somePages = Array.new
    entries = Dir.entries(ENV['EPFWIKI_TEMPLATES_DIR']) - [".", "..", ".svn"]
    entries.each do |entry|
      path = self.templates_dir + '/' + entry
      rel_path = path.gsub(self.path + '/','')
      # NOTE(review): assumes EPFWIKI_TEMPLATES_DIR has a trailing slash — confirm
      path_source = (ENV['EPFWIKI_TEMPLATES_DIR'] + entry)
      if !File.exists?(path)
        logger.info("Template " + path + " does not exist, copy from " + path_source)
        File.makedirs(self.templates_dir)
        File.copy(path_source, path)
        Page.enhance_file(path)
      end
      page = Page.find_or_new({:rel_path => rel_path}, self)
      version = Version.find_current_version(self, page, true)
      page.save!
      version.save!
      somePages << page
    end
    return somePages
  end
| |
| def difference_analyses2 |
| return DifferenceAnalysis.find(:all, :conditions => ['baseline_id=? or baseline_id_from=?', self.baseline_id, self.baseline_id], :order => "created_on ASC") |
| end |
| |
| # #templates_dir returns the path to the folder where the site templates are stored. |
| def templates_dir |
| return path + "/wiki/templates" |
| end |
| |
| # #new_files_dir returns the path to the folder where new pages are stored. |
| def new_files_dir |
| return path + "/wiki/new" |
| end |
| |
  # Users that contributed to this site: everyone who created a version or
  # placed a comment here (single query with two EXISTS subselects).
  def users
    return User.find(:all, :conditions => ['exists (select * from versions vsn where vsn.site_id = ? and vsn.user_id = users.id) or exists (select * from comments cmt where cmt.user_id = users.id and cmt.site_id = ?)', id, id])
  end
| |
| def versions_count_excluding_baseversions |
| return Version.find(:all, :conditions => ['site_id= ? and version <> 0', id]).length |
| end |
| |
| def versions_count_since(time) |
| return Version.find(:all, :conditions => ['site_id= ? and version <> 0 and created_on > ?', id, time]).length |
| end |
| |
| def comments_count_since(time) |
| return Comment.find(:all, :conditions => ['site_id= ? and created_on > ?', id, time]).length |
| end |
| |
  # Unzips the uploaded zip-file (#path2zip) into #path by shelling out to a
  # batch script that runs Apache Ant, see #unzip_upload_cmdline. The command
  # output is captured and written to unzip_upload.log via #write_log.
  # Raises when the destination folder already exists.
  # NOTE(review): the .bat script implies Windows-only — confirm before running elsewhere.
  def unzip_upload
    raise "Folder #{self.path} exists" if File.exists?(self.path)
    logger.debug("Unzipping uploaded file using Apache Ant, command: " + unzip_upload_cmdline)
    cmd = IO.popen(unzip_upload_cmdline, "w+")
    cmd.close_write
    write_log("unzip_upload.log", cmd.readlines.compact.join("\n"))
  end
| |
| def validate_on_create |
| logger.info("Validating creation of site: #{inspect}") |
| if baseline_process? # static site (or baseline process) |
| logger.info('New static site validation') |
| errors.add(:folder, 'doesn\'t exist') if folder.nil? || !File.exists?(self.path) |
| errors.add(:folder, 'was already used to create a baseline process (static site)') if !folder.nil? && Site.find_all_by_site_type_and_folder('S',folder).size > 0 |
| baseline.errors.each {|attr, e| errors.add(attr, e)} if !baseline.nil? && !baseline.valid? # adding all baseline errors to site so these are displayed |
| elsif status == 'P' # pending! We are creating a new Wiki |
| logger.info('New wiki site validation') |
| errors.add(:folder, 'already exists') if File.exists?("#{ENV['EPFWIKI_WIKIS_PATH']}#{folder}") |
| errors.add(:baseline_process, 'not a baseline process (type is not \'S\'') if !site_id_baseline_process.nil? && baseline_process.site_type != 'S' |
| end |
| end |
| |
  # Rails validation hook for existing records. Compares against the stored
  # row to forbid changing a baseline process into a Wiki and to reject
  # invalid baseline process selections.
  def validate_on_update
    logger.info("Validating update of site: #{inspect}")
    site = Site.find(id) # old values from db
    errors.add(:site_type, 'can\'t be updated for a baseline process') if site.baseline_process? and wiki?
    if wiki? and !baseline_process.nil?
      # a pending wiki ('P') may still point at the baseline it was created from
      errors.add(:baseline_process, 'is equal to current baseline') if baseline_process.baseline == baseline and site.status != 'P'
      errors.add(:baseline_process, 'is not a baseline process') if baseline_process.wiki?
      errors.add(:baseline_process, 'was already used to create or update this site') if site.baselines.index(baseline_process.baseline)
    end
  end
| |
| # array of HTML files that are candate Wikification |
| def self.files_html(path) |
| paths = Array.new |
| (Dir.entries(path) - [".", ".."]).each do |entry| |
| new_path = File.expand_path(entry, path) |
| if FileTest.directory?(new_path) |
| paths = paths + Site.files_html(new_path) |
| else |
| paths << new_path if !HTML_FILE_PATTERN.match(entry).nil? && WIKI_FILE_PATTERN.match(entry).nil? |
| end |
| end |
| return paths |
| end |
| |
| def files_wikifiable |
| # return array of html-files that have a meta-tag "element type" -> rup-files |
| raise 'Path can\'t be blank' if self.path.blank? |
| returning paths = [] do |
| Site.files_html(self.path).each do |path2| |
| paths << path2 unless Page::ELEMENT_TYPE_PATTERN.match(IO.readlines(path2).join("\n")).nil? |
| end |
| end |
| end |
| |
| ########### |
| private |
| ########### |
| |
  # TODO R? test if default_CSS was added already
  # Enhances this site's files for use as a Wiki:
  # * installs the modified treebrowser.js into the site's scripts folder
  # * appends TOOLBAR_STYLE to the default CSS and replaces the .pageTitle
  #   rule with CSS_PAGETITLE (restores white-space: nowrap stripped by HTML editors)
  # * runs Page.enhance_file on every wikifiable file
  # NOTE(review): TOOLBAR_STYLE is appended unconditionally, so enhancing the
  # same site twice duplicates the style block (see TODO above). Also, if
  # CSS_PAGETITLE_PATTERN does not match, match(...).to_s is '' and
  # gsub('', ...) would inject CSS_PAGETITLE between every character —
  # confirm the pattern always matches the shipped default.css.
  def enhance_files
    #html_iframe_fragment = IO.readlines(ENV['EPFWIKI_IFRAME_FRAGMENT_FILE_PATH']).join # TODO: remove
    File.copy(ENV['EPFWIKI_ROOT_DIR'] + TREEBROWSER_JS, self.path + "/scripts/treebrowser.js" )
    html = IO.readlines(self.path + '/' + DEFAULT_CSS).join + "\n" + TOOLBAR_STYLE
    html = html.gsub(CSS_PAGETITLE_PATTERN.match(html).to_s, CSS_PAGETITLE)
    file = File.new(self.path + '/' + DEFAULT_CSS, 'w')
    file.puts(html)
    file.close
    self.files_wikifiable.each do |file|
      Page.enhance_file(file)
    end
  end
| |
  # action #copy_to copies the content of this site to another site
  # (theDestSite). Files are overwritten only when they differ; byte-equal
  # files are skipped. Called with theFolderPath nil it starts at self.path
  # and then recurses one directory level per call.
  # NOTE: Ruby does not have a copy + overwrite command?
  # theDestSite:: destination Site whose #path mirrors this site's tree
  # theFolderPath:: current source folder during recursion (nil on the initial call)
  def copy_to(theDestSite, theFolderPath = nil)
    if theFolderPath
      (Dir.entries(theFolderPath) - [".", ".."]).each do |aEntry|
        aPath = File.expand_path(aEntry, theFolderPath)
        # destination path is the source path re-rooted under the destination site
        aDestPath = aPath.gsub(self.path, theDestSite.path)
        if FileTest.directory?(aPath)
          logger.info("Copying folder " + aPath + " to " + aDestPath)
          File.makedirs(aDestPath)
          copy_to(theDestSite, aPath)
        else
          if !FileTest.exists?(aDestPath)
            logger.info("New file copied " + aPath + " to " + aDestPath)
            File.copy(aPath, aDestPath)
          else
            # compare contents so unchanged files keep their timestamps
            if FileUtils.cmp(aPath, aDestPath)
              logger.info("Not copied because equal: " + aPath)
            else
              logger.info("Overwritten: " + aPath)
              File.delete(aDestPath)
              File.copy(aPath, aDestPath)
            end
          end
        end
      end
    else
      logger.info("Copying content from site " + self.title + " to " + theDestSite.title)
      logger.info("Source folder: " + self.path + ". Destination folder: " + theDestSite.path)
      copy_to(theDestSite, self.path)
    end
  end
| |
| def unzip_upload_cmdline |
| cmdParameters = Array.new |
| cmdParameters << ENV['EPFWIKI_ROOT_DIR'].split(":")[0] + ":" #drive letter |
| cmdParameters << ENV['EPFWIKI_ROOT_DIR'].gsub("/","\\") |
| cmdParameters << ENV['EPFWIKI_ANT_PATH'] |
| cmdParameters << self.path2zip |
| cmdParameters << self.path |
| return "#{ENV['EPFWIKI_ROOT_DIR']}script/other/unzip_upload.bat #{cmdParameters.join(' ')}" |
| end |
| |
| end |