Commit 929e76bc authored by Frank Bergmann's avatar Frank Bergmann

Initial Import

parents
<?xml version="1.0"?>
<!-- Generated by the OpenACS Package Manager -->
<package key="acs-content-repository" url="http://openacs.org/repository/apm/packages/acs-content-repository/" type="apm_service">
<package-name>Content Repository</package-name>
<pretty-plural>Content Repository Services</pretty-plural>
<initial-install-p>t</initial-install-p>
<singleton-p>t</singleton-p>
<version name="5.1.5" url="http://openacs.org/repository/download/apm/acs-content-repository-5.1.5.apm">
<owner url="mailto:dcwickstrom@earthlink.net">Dan Wickstrom</owner>
<summary>The canonical repository for OpenACS content.</summary>
<release-date>2005-02-28</release-date>
<vendor url="http://openacs.org">OpenACS</vendor>
<description format="text/html">Provides the API for creating and managing user generated content including
full support for versioning, rendering content to the filesystem, folders and composite content items, and
other CMS backing functionality. Utilized by Bug Tracker, File Storage, and other packages.
</description>
<provides url="acs-content-repository" version="5.1.5"/>
<requires url="acs-kernel" version="5.1.0"/>
<requires url="acs-service-contract" version="5.1.0"/>
<requires url="search" version="5.1.0"/>
<callbacks>
</callbacks>
<parameters>
<parameter datatype="string" min_n_values="1" max_n_values="1" name="TemplateRoot" default="templates" description="The directory to which templates are published and from where they are retrieved when a page is served. If the value does not start with a '/', it is taken to be relative to [acs_root_dir]. Packages that use the content-repository can override this setting by defining their own TemplateRoot parameter." section_name="templates"/>
</parameters>
</version>
</package>
package com.arsdigita.content;
import java.lang.reflect.*;
import java.sql.*;
import java.util.*;
import java.io.*;
import oracle.sql.*;
import oracle.jdbc.driver.*;
import sqlj.runtime.ref.DefaultContext;
import oracle.sqlj.runtime.Oracle;
import com.oroinc.text.perl.*;
// SQLJ stored-procedure class: Perl5 search-and-replace over the content of a
// single revision in the content repository (cr_revisions).
public class Regexp {
/**
 * Searches the content of the given revision for a Perl5 regular expression
 * and, if it matches, performs the substitution on a fresh copy of the
 * revision (content_revision.copy); the original revision is left untouched.
 *
 * NOTE(review): search/replace are interpolated between '/' delimiters, so a
 * pattern containing '/' will produce a malformed expression — confirm that
 * callers pre-escape their input.
 *
 * @return true if the pattern matched (and a modified copy was created)
 **/
public static boolean replace(int revisionID, String search, String replace)
throws SQLException, IOException, MalformedPerl5PatternException {
Perl5Util util = new Perl5Util();
String content;
// Fetch the revision content as a string; blob_to_string is the PL/SQL
// wrapper around com.arsdigita.content.Util.blobToString.
#sql { select blob_to_string(content) into :content from cr_revisions
where revision_id = :revisionID };
boolean hasMatch = util.match("/" + search + "/", content);
if (hasMatch) {
String pattern = "s/" + search + "/" + replace + "/";
content = util.substitute(pattern, content);
Integer copyID;
BLOB blob;
// Copy the revision so the substitution does not clobber the original.
#sql {
begin
:OUT copyID := content_revision.copy(:revisionID);
end;
};
// Overwrite the copy's content BLOB with the substituted text.
#sql { select content into :blob from cr_revisions
where revision_id = :copyID };
if (blob != null)
blob.putBytes(1, content.getBytes());
}
return hasMatch;
}
}
package com.arsdigita.content;
// $Id$
import java.sql.*;
import java.util.*;
import oracle.sql.*;
import java.io.*;
/**
 * Utility conversions between Java strings/files and Oracle LOB locators,
 * published to PL/SQL via the "content" package (see packages-create).
 *
 * NOTE(review): all String&lt;-&gt;byte conversions here use the JVM's default
 * charset; the blobToClob comment elsewhere assumes UTF-8 — confirm the
 * database JVM's file.encoding before relying on non-ASCII content.
 */
public class Util {

    /**
     * Writes s into blob, zero-padded up to size bytes. If size is smaller
     * than the encoded string, the full string is written unpadded.
     * No-op when s is null.
     */
    public static void stringToBlob(String s, oracle.sql.BLOB blob, int size)
        throws SQLException {
        if (s == null) return;
        byte[] inBuffer = s.getBytes();
        if (size < inBuffer.length) size = inBuffer.length;
        byte[] buffer = new byte[size];
        System.arraycopy(inBuffer, 0, buffer, 0, inBuffer.length);
        blob.putBytes(1, buffer);
    }

    /** Writes s into blob without padding. No-op when s is null. */
    public static void stringToBlob(String s, oracle.sql.BLOB blob)
        throws SQLException {
        if (s == null) return;
        blob.putBytes(1, s.getBytes());
    }

    /** Returns the full contents of blob as a String ("" for null/empty). */
    public static String blobToString(oracle.sql.BLOB blob)
        throws SQLException {
        if (blob == null || blob.length() == 0) return "";
        byte[] buffer = new byte[(int) blob.length()];
        blob.getBytes(1, (int) blob.length(), buffer);
        return new String(buffer);
    }

    /**
     * Writes the contents of blob to the file at path, creating parent
     * directories as needed. IOExceptions are reported to stderr rather
     * than thrown (best-effort filesystem publishing).
     */
    public static void blobToFile(String path, oracle.sql.BLOB blob)
        throws SQLException {
        try {
            File aFile = new File(path);
            // BUGFIX: the original called aFile.mkdirs() *after* opening the
            // stream, which both ran too late and tried to create a directory
            // at the file's own path. Create the parent directories instead.
            File parent = aFile.getParentFile();
            if (parent != null) parent.mkdirs();
            FileOutputStream aFileOutputStream = new FileOutputStream(aFile);
            long blobLength = blob.length();
            int chunkSize = blob.getChunkSize();
            byte[] buffer = new byte[chunkSize];
            // Oracle LOB offsets are 1-based, so the last valid offset equals
            // blobLength. BUGFIX: the original used "pos < blobLength", which
            // silently dropped the final byte (and wrote nothing at all for a
            // 1-byte BLOB when chunkSize == 1).
            for (long pos = 1; pos <= blobLength; pos += chunkSize) {
                int read = blob.getBytes(pos, chunkSize, buffer);
                aFileOutputStream.write(buffer, 0, read);
            }
            aFileOutputStream.close();
        } catch (IOException e) {
            System.err.println("Error in writing " + path + ": " + e);
        }
    }

    /** Copies the character contents of clob into blob as encoded bytes. */
    public static void clobToBlob(oracle.sql.CLOB clob, oracle.sql.BLOB blob)
        throws SQLException, IOException {
        if (clob == null) {
            throw new SQLException("Received null value for clob argument.");
        }
        if (blob == null) {
            throw new SQLException("Received null value for blob argument.");
        }
        OutputStream outstream = blob.getBinaryOutputStream();
        // Get an input stream for the clob
        Reader instream = clob.getCharacterStream();
        int size = 4096;
        char[] buffer = new char[size];
        int length = -1;
        while ((length = instream.read(buffer)) != -1) {
            // BUGFIX: the original encoded the *whole* buffer (including stale
            // chars from the previous read) and then truncated the byte array
            // at the CHAR count, corrupting any multi-byte content. Encode
            // exactly the chars read and write every resulting byte.
            byte[] bytes = new String(buffer, 0, length).getBytes();
            outstream.write(bytes, 0, bytes.length);
        }
        instream.close();
        outstream.close();
    }

    // Write a BLOB to a CLOB, assuming the BLOB contains a string in the
    // platform charset (see class note re: UTF-8 assumption).
    public static void blobToClob(oracle.sql.BLOB blob, oracle.sql.CLOB clob)
        throws SQLException, IOException {
        String s = blobToString(blob);
        // Get an output stream for the clob
        Writer outstream = clob.getCharacterOutputStream();
        outstream.write(s);
        outstream.close();
    }
}
package com.arsdigita.content;
import java.lang.reflect.*;
import java.sql.*;
import java.util.*;
import java.io.*;
import oracle.sql.*;
import oracle.xml.parser.v2.*;
import oracle.jdbc.driver.*;
import org.w3c.dom.*;
import sqlj.runtime.ref.DefaultContext;
import oracle.sqlj.runtime.Oracle;
import org.xml.sax.SAXException;
// SQLJ named iterators: TypeIter walks the object-type hierarchy of a
// revision's type; AttrIter lists the attributes declared on a single type.
#sql iterator TypeIter(String object_type, String table_name,
String id_column);
#sql iterator AttrIter(String attribute_name);
// Imports and exports content revisions as XML documents, driven entirely by
// the ACS metadata tables (acs_object_types / acs_attributes).
public class XMLExchange {
// Command-line entry point: prints the XML export of the revision whose id
// is given as the first argument to standard output.
public static void main(String[] args) throws Exception {
Integer revisionID = new Integer(args[0]);
PrintWriter out
= new PrintWriter(
new BufferedWriter(new OutputStreamWriter(System.out)));
exportRevision(revisionID, out);
}
// Parses the XML document held in loc and creates a new revision of itemID
// from it: core fields go through content_revision.new, extended attributes
// are inserted directly into each type's storage table.
// Returns the id of the newly created revision.
public static int importRevision(Integer itemID, Integer revisionID,
CLOB loc) throws SQLException, IOException,
XMLParseException, SAXException {
DOMParser parser = new DOMParser();
parser.parse(loc.getCharacterStream());
XMLDocument doc = parser.getDocument();
doc.print(System.out);
XMLElement revision = (XMLElement) doc.getDocumentElement();
// Create the revision
String title = getChildText(revision, "title");
String description = getChildText(revision, "description");
String publishDate = getChildText(revision, "publish_date");
String mimeType = getChildText(revision, "mime_type");
String text = getChildText(revision, "text");
#sql { begin
:OUT revisionID := content_revision.new(
title => :title,
description => :description,
publish_date => to_date(:publishDate),
mime_type => :mimeType,
text => :text,
item_id => content_symlink.resolve(:itemID),
revision_id => :revisionID);
end;
};
// Query for additional tables in which to insert extended attributes
// (walk the hierarchy from the revision's type upwards, excluding the
// acs_object and content_revision base types already handled above).
TypeIter typeIter;
#sql typeIter = {
select
object_type, table_name, id_column
from
acs_object_types
where
object_type ^= 'acs_object'
and
object_type ^= 'content_revision'
connect by
prior supertype = object_type
start with
object_type = (
select object_type from acs_objects where object_id = :revisionID
)
order by
level desc
};
String objectType;
String dmlColumns, dmlValues;
ArrayList attributes = new ArrayList();
AttrIter attrIter;
// Use JDBC for the actual insert rather than SQLJ because we need
// to build the DML dynamically
Connection conn = DefaultContext.getDefaultContext().getConnection();
while (typeIter.next()) {
objectType = typeIter.object_type();
dmlColumns = "insert into " + typeIter.table_name() + "(" +
typeIter.id_column();
dmlValues = ") values ( ?";
// query the attributes of the table
#sql attrIter = {
select
attribute_name
from
acs_attributes
where
object_type = :objectType
order by
attribute_name
};
while (attrIter.next()) {
dmlColumns += ", " + attrIter.attribute_name();
dmlValues += ",?";
attributes.add(attrIter.attribute_name());
}
// Attribute values come straight from the XML document; all are bound
// as strings and converted by the database.
PreparedStatement stmt = conn.prepareStatement(dmlColumns +
dmlValues + ")");
stmt.setInt(1, revisionID.intValue());
for (int i = 0; i < attributes.size(); i++) {
stmt.setString(i + 2,
getChildText(revision, (String) attributes.get(i)));
}
stmt.execute();
stmt.close();
attributes.clear();
}
return revisionID.intValue();
}
// Write XML to a CLOB
public static int exportRevision(Integer revisionID, CLOB loc)
throws SQLException, IOException {
PrintWriter out = new PrintWriter(loc.getCharacterOutputStream());
exportRevision(revisionID, out);
return revisionID.intValue();
}
// Default implementation of a function to write an XML
// representation of a content revision to an output stream.
// Returns the revision id, or -1 if the object is not a content type.
public static int exportRevision(Integer revisionID, PrintWriter out)
throws SQLException {
try {
XMLDocument doc = new XMLDocument();
// get the content type
String contentType;
String tableName;
String isContentType;
#sql {
select
object_type, table_name, content_type.is_content_type(object_type)
into
:contentType, :tableName, :isContentType
from
acs_object_types
where
object_type = ( select object_type from acs_objects
where object_id = :revisionID )
};
// screen out non-content-types that are revisioned, such as templates
if (isContentType.equals("f")) return -1;
XMLElement revision = new XMLElement(contentType);
doc.appendChild(revision);
// select attributes for the revision, most general supertype first
AttrIter attrIter;
#sql attrIter = {
select
attribute_name
from
( select
object_type, level sort_level
from
acs_object_types
where
object_type <> 'acs_object'
start with
object_type = :contentType
connect by
object_type = prior supertype
) types,
acs_attributes attrs
where
attrs.object_type = types.object_type
order by
types.sort_level desc, attrs.sort_order
};
// build the query to select attributes from the view for the
// content type (the trailing "x" is the attribute view's naming
// convention in the content repository)
String attrQuery = "select revision_id";
while (attrIter.next()) {
attrQuery += ", " + attrIter.attribute_name();
}
attrQuery += " from " + tableName + "x where revision_id = ?";
// select a row from the attribute view for the content type
Connection conn = DefaultContext.getDefaultContext().getConnection();
PreparedStatement stmt = conn.prepareStatement(attrQuery);
stmt.setInt(1, revisionID.intValue());
ResultSet rs = stmt.executeQuery();
ResultSetMetaData md = rs.getMetaData();
if (rs.next()) {
for (int i = 1; i <= md.getColumnCount(); i++) {
// create an XML element for each attribute
String colName = md.getColumnName(i);
String colValue = rs.getString(i);
if (colValue == null) colValue = "";
appendTextOnlyElement(revision, colName, colValue);
}
}
stmt.close();
doc.print(out);
} catch (Exception e) {
throw new SQLException("Failed to generate XML document for revision " +
revisionID + ": " + e);
}
return revisionID.intValue();
}
// Returns the text content of the first child element with the given tag
// name, or null when no such child exists.
// NOTE(review): throws NullPointerException when the child element is empty
// (getFirstChild() == null) — confirm callers never pass empty elements.
private static String getChildText(XMLElement element, String name) {
NodeList nodes = element.getChildrenByTagName(name);
if (nodes.getLength() == 0) return null;
// get the text node under this node
Node textNode = nodes.item(0).getFirstChild();
return textNode.getNodeValue();
}
// Appends a text node containing the given string to the element.
private static void setChildText(XMLElement element, String text) {
XMLText textNode = new XMLText(text);
element.appendChild(textNode);
}
// Creates a child element of the form <name>text</name> under parent.
private static void appendTextOnlyElement(XMLElement parent,
String name, String text) {
XMLElement element = new XMLElement(name);
setChildText(element, text);
parent.appendChild(element);
}
}
This diff is collapsed.
-- Registration of ACS Content Repository System.
-- Copyright (C) 1999-2000 ArsDigita Corporation
-- Author: Karl Goldstein (karlg@arsdigita.com)
-- $Id$
-- This is free software distributed under the terms of the GNU Public
-- License. Full text of the license is available from the GNU Project:
-- http://www.fsf.org/copyleft/gpl.html
-- Load the full content repository data model (content-create.sql)
@@ content-create
This diff is collapsed.
-- Uninstall content repository tables of the ArsDigita Community
-- System
-- Copyright (C) 1999-2000 ArsDigita Corporation
-- Author: Karl Goldstein (karlg@arsdigita.com)
-- $Id$
-- This is free software distributed under the terms of the GNU Public
-- License. Full text of the license is available from the GNU Project:
-- http://www.fsf.org/copyleft/gpl.html
set serveroutput on
-- unregistering types, deleting the default folders
declare
v_id integer;
begin
-- root folder for templates
v_id := content_template.get_root_folder;
content_folder.unregister_content_type(
folder_id => v_id,
content_type => 'content_template',
include_subtypes => 't'
);
content_folder.unregister_content_type(
folder_id => v_id,
content_type => 'content_symlink',
include_subtypes => 't'
);
content_folder.unregister_content_type(
folder_id => v_id,
content_type => 'content_folder',
include_subtypes => 't'
);
content_folder.del(v_id);
-- the root folder for content items
v_id := content_item.get_root_folder;
content_folder.unregister_content_type(
folder_id => v_id,
content_type => 'content_symlink',
include_subtypes => 't'
);
content_folder.unregister_content_type(
folder_id => v_id,
content_type => 'content_folder',
include_subtypes => 't'
);
content_folder.unregister_content_type (
folder_id => v_id,
content_type => 'content_revision',
include_subtypes => 't'
);
content_folder.del (v_id);
end;
/
show errors
-- detach the default mime types from content_revision before table drops
begin
content_type.unregister_mime_type(
content_type => 'content_revision',
mime_type => 'text/html');
content_type.unregister_mime_type(
content_type => 'content_revision',
mime_type => 'text/plain');
end;
/
show errors
-- drop all extended attribute tables
--declare
-- cursor type_cur is
-- select object_type, table_name
-- from acs_object_types
-- where table_name <> 'cr_revisions'
-- connect by prior object_type = supertype
-- start with object_type = 'content_revision'
-- order by level desc;
--begin
-- for type_rec in type_cur loop
-- dbms_output.put_line('Dropping ' || type_rec.table_name);
-- execute immediate 'drop table ' || type_rec.table_name;
-- end loop;
--end;
--/
--show errors
-- dropping pl/sql definitions
prompt ** dropping content-image
@@ content-image-drop
-- doc-package-drop
-- content-search-drop
-- drop the Oracle Text (interMedia) artifacts created by content-search
begin
ctx_ddl.drop_section_group('auto');
end;
/
show errors
begin
ctx_ddl.drop_preference('CONTENT_FILTER_PREF');
end;
/
show errors
prompt ** dropping object types
@@ types-drop
-- packages-drop
-- content-package-drop
prompt ** dropping lots of tables
-- content-xml-drop
drop table cr_xml_docs;
drop sequence cr_xml_doc_seq;
-- content-util drop
-- document submission with conversion to html
drop index cr_doc_filter_index;
drop table cr_doc_filter;
--text submission
drop table cr_text;
-- content keywords
drop table cr_item_keyword_map ;
drop table cr_keywords ;
-- content extlinks
drop table cr_extlinks ;
-- content symlinks
drop table cr_symlinks ;
-- content templates
drop table cr_item_template_map ;
drop table cr_type_template_map ;
drop table cr_template_use_contexts ;
drop table cr_templates ;
-- content folders
drop table cr_folder_type_map ;
drop table cr_folders cascade constraints;
prompt ** dropping more tables
-- content publishing
drop table cr_scheduled_release_job;
drop table cr_scheduled_release_log;
drop table cr_release_periods;
drop table cr_item_publish_audit;
-- content revisions
drop table cr_files_to_delete;
drop table cr_content_text;
drop table cr_revision_attributes;
drop table cr_revisions cascade constraints;
-- content_items
drop table cr_item_rels ;
drop table cr_child_rels ;
drop table cr_items cascade constraints;
-- content types
drop table cr_type_relations ;
drop table cr_type_children ;
-- locales
drop table cr_locales ;
-- mime types
drop table cr_content_mime_type_map ;
drop table cr_mime_types ;
-- dropping ats datatypes for cms
-- remove the datatypes that content-create registered in the kernel
begin
delete from acs_datatypes where datatype in ('text');
delete from acs_datatypes where datatype in ('keyword');
delete from acs_datatypes where datatype in ('integer');
commit;
end;
/
show errors
-- Data model to support content repository of the ArsDigita
-- Community System
-- Copyright (C) 1999-2000 ArsDigita Corporation
-- Author: Karl Goldstein (karlg@arsdigita.com)
-- $Id$
-- This is free software distributed under the terms of the GNU Public
-- License. Full text of the license is available from the GNU Project:
-- http://www.fsf.org/copyleft/gpl.html
-- Package body for external links: content items that point at a URL
-- outside the repository, stored in cr_extlinks alongside cr_items.
create or replace package body content_extlink
as
-- Creates a new extlink item. The label defaults to the URL and the name
-- defaults to 'link<seq>' when not supplied. Returns the new item id.
function new (
name in cr_items.name%TYPE default null,
url in cr_extlinks.url%TYPE,
label in cr_extlinks.label%TYPE default null,
description in cr_extlinks.description%TYPE default null,
parent_id in cr_items.parent_id%TYPE,
extlink_id in cr_extlinks.extlink_id%TYPE default null,
creation_date in acs_objects.creation_date%TYPE
default sysdate,
creation_user in acs_objects.creation_user%TYPE
default null,
creation_ip in acs_objects.creation_ip%TYPE default null
) return cr_extlinks.extlink_id%TYPE is
v_extlink_id cr_extlinks.extlink_id%TYPE;
v_label cr_extlinks.label%TYPE;
v_name cr_items.name%TYPE;
begin
if label is null then
v_label := url;
else
v_label := label;
end if;
if name is null then
-- sequence value used only to build a unique default name;
-- the actual item id is allocated by content_item.new below
select acs_object_id_seq.nextval into v_extlink_id from dual;
v_name := 'link' || v_extlink_id;
else
v_name := name;
end if;
v_extlink_id := content_item.new(
item_id => content_extlink.new.extlink_id,
name => v_name,
content_type => 'content_extlink',
creation_date => content_extlink.new.creation_date,
creation_user => content_extlink.new.creation_user,
creation_ip => content_extlink.new.creation_ip,
parent_id => content_extlink.new.parent_id
);
insert into cr_extlinks
(extlink_id, url, label, description)
values
(v_extlink_id, content_extlink.new.url, v_label,
content_extlink.new.description);
return v_extlink_id;
end new;
-- Deletes the extlink row and then the underlying content item.
procedure del (
extlink_id in cr_extlinks.extlink_id%TYPE
) is
begin
delete from cr_extlinks
where extlink_id = content_extlink.del.extlink_id;
content_item.del(content_extlink.del.extlink_id);
end del;
-- Returns 't' when the given item id is an extlink, 'f' otherwise.
function is_extlink (
item_id in cr_items.item_id%TYPE
) return char
is
v_extlink_p integer := 0;
begin
select
count(1) into v_extlink_p
from
cr_extlinks
where
extlink_id = is_extlink.item_id;
if v_extlink_p = 1 then
return 't';
else
return 'f';
end if;
end is_extlink;
-- Copies an extlink into target_folder_id (as a brand-new item).
-- Silently does nothing when the target is not a folder, the extlink
-- type is not registered there, or the copy would land in the same
-- folder under the same name.
procedure copy (
extlink_id in cr_extlinks.extlink_id%TYPE,
target_folder_id in cr_folders.folder_id%TYPE,
creation_user in acs_objects.creation_user%TYPE,
creation_ip in acs_objects.creation_ip%TYPE default null,
name in cr_items.name%TYPE default null
) is
v_current_folder_id cr_folders.folder_id%TYPE;
v_name cr_items.name%TYPE;
v_url cr_extlinks.url%TYPE;
v_label cr_extlinks.label%TYPE;
v_description cr_extlinks.description%TYPE;
v_extlink_id cr_extlinks.extlink_id%TYPE;
begin
if content_folder.is_folder(copy.target_folder_id) = 't' then
select
parent_id
into
v_current_folder_id
from
cr_items
where
item_id = copy.extlink_id;
select
i.name, e.url, e.label, e.description
into
v_name, v_url, v_label, v_description
from
cr_extlinks e, cr_items i
where
e.extlink_id = i.item_id
and
e.extlink_id = copy.extlink_id;
-- can't copy to the same folder
if copy.target_folder_id ^= v_current_folder_id or (v_name != copy.name and copy.name is not null) then
if copy.name is not null then
v_name := copy.name;
end if;
if content_folder.is_registered(copy.target_folder_id, 'content_extlink') = 't' then
v_extlink_id := content_extlink.new(
parent_id => copy.target_folder_id,
name => v_name,
label => v_label,
description => v_description,
url => v_url,
creation_user => copy.creation_user,
creation_ip => copy.creation_ip
);
end if;
end if;
end if;
end copy;
end content_extlink;
/
show errors
This diff is collapsed.
-- drop the content-image type from the data model
-- Copyright (C) 2000 ArsDigita Corporation
-- $Id$
-- This is free software distributed under the terms of the GNU Public
-- License. Full text of the license is available from the GNU Project:
-- http://www.fsf.org/copyleft/gpl.html
-- unregister mime types from the image type
begin
content_type.unregister_mime_type(
content_type => 'image',
mime_type => 'image/jpeg'
);
content_type.unregister_mime_type(
content_type => 'image',
mime_type => 'image/gif'
);
end;
/
show errors
-- remove image mime types
delete from cr_mime_types where mime_type like 'image%';
-- this should remove the attributes and table related to the
-- the image type
begin
content_type.drop_type (
content_type => 'image',
drop_table_p => 't');
end;
/
show errors
-- finally drop the PL/SQL API package for the image type
drop package image;
This diff is collapsed.
This diff is collapsed.
-- Data model to support content repository of the ArsDigita
-- Community System
-- Copyright (C) 1999-2000 ArsDigita Corporation
-- Author: Stanislav Freidin (sfreidin@arsdigita.com)
-- This is free software distributed under the terms of the GNU Public
-- License. Full text of the license is available from the GNU Project:
-- http://www.fsf.org/copyleft/gpl.html
-- Package body for content keywords: a hierarchical taxonomy (cr_keywords)
-- that content items can be tagged with via cr_item_keyword_map.
create or replace package body content_keyword
as
-- Returns the heading of the given keyword.
function get_heading (
keyword_id in cr_keywords.keyword_id%TYPE
) return varchar2
is
v_heading varchar2(4000);
begin
select heading into v_heading from cr_keywords
where keyword_id = content_keyword.get_heading.keyword_id;
return v_heading;
end get_heading;
-- Returns the description of the given keyword.
function get_description (
keyword_id in cr_keywords.keyword_id%TYPE
) return varchar2
is
v_description varchar2(4000);
begin
select description into v_description from cr_keywords
where keyword_id = content_keyword.get_description.keyword_id;
return v_description;
end get_description;
-- Replaces the heading of the given keyword.
procedure set_heading (
keyword_id in cr_keywords.keyword_id%TYPE,
heading in cr_keywords.heading%TYPE
)
is
begin
update cr_keywords set
heading = set_heading.heading
where
keyword_id = set_heading.keyword_id;
end set_heading;
-- Replaces the description of the given keyword.
procedure set_description (
keyword_id in cr_keywords.keyword_id%TYPE,
description in cr_keywords.description%TYPE
)
is
begin
update cr_keywords set
description = set_description.description
where
keyword_id = set_description.keyword_id;
end set_description;
-- Returns 't' when the keyword has no children, 'f' otherwise.
function is_leaf (
keyword_id in cr_keywords.keyword_id%TYPE
) return varchar2
is
v_leaf varchar2(1);
cursor c_leaf_cur is
select
'f'
from
cr_keywords k
where
k.parent_id = is_leaf.keyword_id;
begin
open c_leaf_cur;
fetch c_leaf_cur into v_leaf;
if c_leaf_cur%NOTFOUND then
v_leaf := 't';
end if;
close c_leaf_cur;
return v_leaf;
end is_leaf;
-- Creates a keyword as an acs_object (context = parent keyword) plus the
-- cr_keywords row. Returns the new keyword id.
function new (
heading in cr_keywords.heading%TYPE,
description in cr_keywords.description%TYPE default null,
parent_id in cr_keywords.parent_id%TYPE default null,
keyword_id in cr_keywords.keyword_id%TYPE default null,
creation_date in acs_objects.creation_date%TYPE
default sysdate,
creation_user in acs_objects.creation_user%TYPE
default null,
creation_ip in acs_objects.creation_ip%TYPE default null,
object_type in acs_object_types.object_type%TYPE default 'content_keyword'
) return cr_keywords.keyword_id%TYPE
is
v_id integer;
begin
v_id := acs_object.new (object_id => keyword_id,
context_id => parent_id,
object_type => object_type,
creation_date => creation_date,
creation_user => creation_user,
creation_ip => creation_ip);
insert into cr_keywords
(heading, description, keyword_id, parent_id)
values
(heading, description, v_id, parent_id);
return v_id;
end new;
-- Deletes a keyword after unassigning it from every item it is mapped to.
procedure del (
keyword_id in cr_keywords.keyword_id%TYPE
)
is
v_item_id integer;
cursor c_rel_cur is
select item_id from cr_item_keyword_map
where keyword_id = content_keyword.del.keyword_id;
begin
open c_rel_cur;
loop
fetch c_rel_cur into v_item_id;
exit when c_rel_cur%NOTFOUND;
item_unassign(v_item_id, content_keyword.del.keyword_id);
end loop;
close c_rel_cur;
acs_object.del(keyword_id);
end del;
-- Assigns a keyword to an item; a no-op when the mapping already exists.
procedure item_assign (
item_id in cr_items.item_id%TYPE,
keyword_id in cr_keywords.keyword_id%TYPE,
context_id in acs_objects.context_id%TYPE default null,
creation_user in acs_objects.creation_user%TYPE default null,
creation_ip in acs_objects.creation_ip%TYPE default null
)
is
v_dummy integer;
begin
-- Do nothing if the keyword is assigned already
select decode(count(*),0,0,1) into v_dummy from dual
where exists (select 1 from cr_item_keyword_map
where item_id=item_assign.item_id
and keyword_id=item_assign.keyword_id);
if v_dummy > 0 then
-- previous assignment exists
return;
end if;
-- NOTE(review): the unqualified names in the VALUES clause resolve to
-- the procedure parameters (no table columns are in scope there), but
-- qualifying them as item_assign.item_id would be clearer.
insert into cr_item_keyword_map (
item_id, keyword_id
) values (
item_id, keyword_id
);
end item_assign;
-- Removes the keyword/item mapping (no error when absent).
procedure item_unassign (
item_id in cr_items.item_id%TYPE,
keyword_id in cr_keywords.keyword_id%TYPE
) is
begin
delete from cr_item_keyword_map
where item_id = item_unassign.item_id
and keyword_id = item_unassign.keyword_id;
end item_unassign;
-- Returns 't'/'f' depending on whether keyword_id is assigned to item_id.
-- recurse = 'none' checks the exact keyword only, 'up' also matches any
-- descendant keyword of keyword_id, 'down' any ancestor; anything else
-- raises an application error.
function is_assigned (
item_id in cr_items.item_id%TYPE,
keyword_id in cr_keywords.keyword_id%TYPE,
recurse in varchar2 default 'none'
) return varchar2
is
v_ret varchar2(1);
begin
-- Look for an exact match
if recurse = 'none' then
declare
begin
select 't' into v_ret from cr_item_keyword_map
where item_id = is_assigned.item_id
and keyword_id = is_assigned.keyword_id;
return 't';
exception when no_data_found then
return 'f';
end;
end if;
-- Look from specific to general
if recurse = 'up' then
begin
select 't' into v_ret from dual where exists (select 1 from
(select keyword_id from cr_keywords
connect by parent_id = prior keyword_id
start with keyword_id = is_assigned.keyword_id
) t, cr_item_keyword_map m
where
t.keyword_id = m.keyword_id
and
m.item_id = is_assigned.item_id);
return 't';
exception when no_data_found then
return 'f';
end;
end if;
if recurse = 'down' then
begin
select 't' into v_ret from dual where exists ( select 1 from
(select keyword_id from cr_keywords
connect by prior parent_id = keyword_id
start with keyword_id = is_assigned.keyword_id
) t, cr_item_keyword_map m
where
t.keyword_id = m.keyword_id
and
m.item_id = is_assigned.item_id);
return 't';
exception when no_data_found then
return 'f';
end;
end if;
-- Tried none, up and down - must be an invalid parameter
raise_application_error (-20000, 'The recurse parameter to ' ||
'content_keyword.is_assigned should be ''none'', ''up'' or ''down''.');
end is_assigned;
-- Returns the '/'-separated path of headings from the root keyword down
-- to keyword_id, or null when the keyword does not exist.
function get_path (
keyword_id in cr_keywords.keyword_id%TYPE
) return varchar2
is
v_path varchar2(4000) := '';
v_is_found varchar2(1) := 'f';
cursor c_keyword_cur is
select
heading
from (
select
heading, level as tree_level
from cr_keywords
connect by prior parent_id = keyword_id
start with keyword_id = get_path.keyword_id
)
order by
tree_level desc;
v_heading cr_keywords.heading%TYPE;
begin
open c_keyword_cur;
loop
fetch c_keyword_cur into v_heading;
exit when c_keyword_cur%NOTFOUND;
v_is_found := 't';
v_path := v_path || '/' || v_heading;
end loop;
close c_keyword_cur;
if v_is_found = 'f' then
return null;
else
return v_path;
end if;
end get_path;
end content_keyword;
/
show errors
-- Ensure that the context_id in acs_objects is always set to the
-- parent_id in cr_keywords
-- Keep the acs_objects context in sync whenever a keyword is re-parented.
create or replace trigger cr_keywords_update_tr
after update of parent_id on cr_keywords for each row
begin
update acs_objects set context_id = :new.parent_id
where object_id = :new.keyword_id;
end cr_keywords_update_tr;
/
show errors
-- Data model to support content repository of the ArsDigita
-- Publishing System
-- Copyright (C) 1999-2000 ArsDigita Corporation
-- Author: Karl Goldstein (karlg@arsdigita.com)
-- $Id$
-- This is free software distributed under the terms of the GNU Public
-- License. Full text of the license is available from the GNU Project:
-- http://www.fsf.org/copyleft/gpl.html
-- PL/SQL call specs binding the content package to the Java LOB helpers in
-- com.arsdigita.content.Util (loaded into the database JVM).
create or replace package content is
-- Write a string into a BLOB (see Util.stringToBlob).
procedure string_to_blob(
s varchar2, blob_loc blob)
as language
java
name
'com.arsdigita.content.Util.stringToBlob(
java.lang.String, oracle.sql.BLOB
)';
-- Write a string into a BLOB, zero-padded to blob_size bytes.
procedure string_to_blob_size(
s varchar2, blob_loc blob, blob_size number)
as language
java
name
'com.arsdigita.content.Util.stringToBlob(
java.lang.String, oracle.sql.BLOB, int
)';
-- Read a BLOB back as a string.
function blob_to_string(
blob_loc blob) return varchar2
as language
java
name
'com.arsdigita.content.Util.blobToString(
oracle.sql.BLOB
) return java.lang.String';
-- Write a BLOB to the file named by s on the server filesystem.
procedure blob_to_file(
s varchar2, blob_loc blob)
as language
java
name
'com.arsdigita.content.Util.blobToFile(
java.lang.String, oracle.sql.BLOB
)';
end content;
/
show errors
This diff is collapsed.
This diff is collapsed.
-- Data model to support release scheduling of items in the content
-- repository of the ArsDigita Publishing System
-- Copyright (C) 1999-2000 ArsDigita Corporation
-- Author: Karl Goldstein (karlg@arsdigita.com)
-- $Id$
-- This is free software distributed under the terms of the GNU Public
-- License. Full text of the license is available from the GNU Project:
-- http://www.fsf.org/copyleft/gpl.html
prompt *** Preparing for scheduled updates to live content...
-- cr_scheduled_release_job is a single-row bookkeeping table; forbid any
-- further inserts so it stays single-row.
create or replace trigger cr_scheduled_release_tr
before insert on cr_scheduled_release_job
for each row
begin
raise_application_error(-20000,
'Inserts are not allowed into cr_scheduled_release_job.'
);
end;
/
show errors
-- Update the publishing status for items that are due to be released
-- or expired.
-- Flips items to 'live'/'expired' whose release period started or ended
-- since the last run, logs the outcome, and advances the last-run marker.
-- Intended to be invoked periodically (scheduling moved to AOLserver).
create or replace procedure cr_scheduled_release_exec is
last_exec date;
this_exec date := sysdate;
cursor start_cur is
select
p.item_id, live_revision
from
cr_release_periods p, cr_items i
where
start_when between last_exec and sysdate
and
p.item_id = i.item_id;
cursor end_cur is
select
p.item_id, live_revision
from
cr_release_periods p, cr_items i
where
end_when between last_exec and sysdate
and
p.item_id = i.item_id;
items_released integer := 0;
items_expired integer := 0;
-- initialized at declaration time, i.e. before any work has run; they are
-- overwritten in the exception handler on failure
err_num integer := sqlcode;
err_msg varchar2(500) := substr(sqlerrm, 1, 500);
begin
begin
-- NOTE(review): "select last_exec into last_exec" relies on the column
-- name winning over the local variable inside SQL — confirm before
-- renaming either side.
select last_exec into last_exec from cr_scheduled_release_job;
for item_rec in start_cur loop
-- update publish status
update cr_items
set publish_status = 'live'
where
item_id = item_rec.item_id;
items_released := items_released + 1;
end loop;
for item_rec in end_cur loop
-- update publish status
update cr_items
set publish_status = 'expired'
where
item_id = item_rec.item_id;
items_expired := items_expired + 1;
end loop;
exception
when others then
err_num := SQLCODE;
err_msg := substr(SQLERRM, 1, 500);
end;
-- keep a record of the update
insert into cr_scheduled_release_log (
items_released, items_expired, err_num, err_msg
) values (
items_released, items_expired, err_num, err_msg
);
-- Reset the last time of execution to start of processing
update cr_scheduled_release_job set last_exec = this_exec;
-- Table was growing without bound (OpenACS DanW)
delete from cr_scheduled_release_log
where exec_date < sysdate - 4*7;
commit;
end cr_scheduled_release_exec;
/
show errors
-- initialize the scheduled publication job
-- job scheduling moved to aolserver (OpenACS - DanW)
-- declare
-- v_job_id integer;
-- interval integer := 15;
-- cursor job_cur is
-- select job from user_jobs
-- where what = 'cr_scheduled_release_exec;';
-- begin
-- open job_cur;
-- fetch job_cur into v_job_id;
-- if job_cur%NOTFOUND then
-- dbms_output.put_line('
-- Submitting job to process scheduled updates to live content...');
-- dbms_job.submit(
-- job => v_job_id,
-- what => 'cr_scheduled_release_exec;',
-- next_date => sysdate,
-- interval => 'sysdate + ' || (interval/24/60)
-- );
-- update cr_scheduled_release_job set job_id = v_job_id;
-- else
-- dbms_job.change(
-- job => v_job_id,
-- what => 'cr_scheduled_release_exec;',
-- next_date => sysdate,
-- interval => 'sysdate + ' || (interval/24/60)
-- );
-- end if;
-- end;
-- /
-- show errors
------------------------------------------------------------
-- Set up an index with INSO filtering on the content column
------------------------------------------------------------
set serveroutput on
-- Create the INSO filter preference (idempotently) and build the Oracle Text
-- index over the revision content column.
declare
v_exists integer;
begin
-- Check whether the preference already exists
select decode(count(*),0,0,1) into v_exists from ctx_user_preferences
where pre_name = 'CONTENT_FILTER_PREF';
if v_exists = 0 then
dbms_output.put_line('Creating content filter preference...');
ctx_ddl.create_preference
(
preference_name => 'CONTENT_FILTER_PREF',
object_name => 'INSO_FILTER'
);
end if;
end;
/
create index cr_rev_content_index on cr_revisions ( content )
indextype is ctxsys.context
parameters ('FILTER content_filter_pref' );
-- DRB: Use the "online" version if you have Oracle Enterprise Edition
-- alter index cr_rev_content_index rebuild online parameters ('sync');
alter index cr_rev_content_index rebuild parameters ('sync');
------------------------------------------------------------
-- Set up an XML index for searching attributes
------------------------------------------------------------
-- To find the word company in the title only:
-- select revision_id,score(1)
-- from cr_revision_attributes
-- where contains(attributes, 'company WITHIN title', 1) > 0;
-- use a direct datastore rather than setting up a user datastore
-- this avoids having to generate an XML document for every
-- revision every time the index is rebuilt. It also avoids the
-- cumbersome manual process of setting up a user datastore.
-- Maintains cr_revision_attributes, the XML attribute blobs indexed by the
-- attribute search index below.
create or replace package content_search is
procedure update_attribute_index;
end content_search;
/
show errors
create or replace package body content_search is
-- Generates the attribute XML for every revision that does not yet have a
-- cr_revision_attributes row, committing after each revision.
procedure update_attribute_index is
begin
for c1 in (select revision_id from cr_revisions r where not exists (
select 1 from cr_revision_attributes a
where a.revision_id = r.revision_id)) loop
content_revision.index_attributes(c1.revision_id);
commit;
end loop;
end update_attribute_index;
end;
/
show errors
-- Create the AUTO section group (idempotently) and build the Oracle Text
-- index that lets queries search within individual attribute tags.
declare
v_exists integer;
begin
-- Check whether the section group already exists
select decode(count(*),0,0,1) into v_exists from ctx_user_section_groups
where sgp_name = 'AUTO';
if v_exists = 0 then
dbms_output.put_line('Creating auto section group for attribute index...');
ctx_ddl.create_section_group('auto', 'AUTO_SECTION_GROUP');
end if;
end;
/
create index cr_rev_attribute_index on cr_revision_attributes ( attributes )
indextype is ctxsys.context
parameters ('filter ctxsys.null_filter section group auto' );
-- populate the attribute table before the first index sync
begin
content_search.update_attribute_index;
end;
/
show errors
-- DRB: Use the "online" version if you have Oracle Enterprise Edition
-- alter index cr_rev_attribute_index rebuild online parameters ('sync');
alter index cr_rev_attribute_index rebuild parameters ('sync');
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
-- Data model to support XML exchange with the content repository of
-- the ArsDigita Community System
-- Copyright (C) 1999-2000 ArsDigita Corporation
-- Author: Karl Goldstein (karlg@arsdigita.com)
-- $Id$
-- This is free software distributed under the terms of the GNU Public
-- License. Full text of the license is available from the GNU Project:
-- http://www.fsf.org/copyleft/gpl.html
-- A sequence for uniquely identifying uploaded XML documents until
-- they are inserted into the repository
create sequence cr_xml_doc_seq;
-- session-scoped staging area for uploaded XML documents
create global temporary table cr_xml_docs (
doc_id integer primary key,
doc CLOB
) on commit delete rows;
comment on table cr_xml_docs is '
A temporary table for holding uploaded XML documents for the
duration of a transaction, until they can be inserted into
the content repository.
';
-- Reclassify the core item/revision/image attributes onto the datatypes
-- registered for the content repository (keyword/text/integer).
update acs_attributes
set datatype = 'keyword'
where attribute_name = 'name'
and object_type = 'content_item';
update acs_attributes
set datatype = 'keyword'
where attribute_name = 'locale'
and object_type = 'content_item';
update acs_attributes
set datatype = 'text'
where attribute_name = 'title'
and object_type = 'content_revision';
update acs_attributes
set datatype = 'text'
where attribute_name = 'description'
and object_type = 'content_revision';
update acs_attributes
set datatype = 'text'
where attribute_name = 'mime_type'
and object_type = 'content_revision';
update acs_attributes
set datatype = 'integer'
where attribute_name = 'width'
and object_type = 'image';
update acs_attributes
set datatype = 'integer'
where attribute_name = 'height'
and object_type = 'image';
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
--
-- CR upgrade script.
--
-- SQL*Plus "@@" executes the named script relative to the directory
-- of the currently running script; this presumably re-creates the
-- content repository PL/SQL packages as part of the upgrade.
@@ packages-create.sql
\ No newline at end of file
This diff is collapsed.
This diff is collapsed.
--
-- Adds the */* mime type as "Unknown" (added to content-create.sql by lars)
--
-- Fix: the label literal was misspelled 'Unkown'.  The statement is
-- idempotent -- the NOT EXISTS guard keys on mime_type, so re-running
-- it on an installation that already has */* inserts nothing.
-- Note: the empty-string file_extension is kept as-is (Oracle treats
-- '' as NULL).
insert into cr_mime_types (label, mime_type, file_extension)
select 'Unknown', '*/*', ''
from dual
where not exists (select 1
                  from cr_mime_types
                  where mime_type = '*/*');
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
-- Exposes -100 as a named pseudo-constant, c_root_folder_id, so that
-- queries and PL/SQL can select it from this view instead of
-- hard-coding the value.
-- NOTE(review): presumably -100 is the well-known object_id of the
-- content repository root folder -- confirm against content-create.sql.
create or replace view content_item_globals as
select -100 as c_root_folder_id
from dual;
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment