~netlandish/links

b416e0cdd7e3d01f48c1479a76f7a1b43fd94471 — Peter Sanchez 12 days ago 63b52bd
Adding restrictions to avoid saving duplicate bookmarks within the same organization. The system will no longer add the same link twice to the same organization.

Changelog-changed: No longer allowing duplicate bookmarks to be saved
  under the same organization.
Signed-off-by: Peter Sanchez <peter@netlandish.com>
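
For illustration, a minimal SQL sketch of the behavior this change enforces, using a throwaway demo_links table rather than the project's real org_links schema: once the unique constraint on (base_url_id, org_id) exists, saving the same link to the same organization a second time updates the existing row instead of creating a duplicate.

-- Sketch only: throwaway table, not the schema from this repository.
CREATE TEMP TABLE demo_links (
    id SERIAL PRIMARY KEY,
    base_url_id BIGINT NOT NULL,
    org_id BIGINT NOT NULL,
    updated_on TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT demo_unique_base_url_org UNIQUE (base_url_id, org_id)
);

INSERT INTO demo_links (base_url_id, org_id) VALUES (1, 2);

-- A second save of the same link/org pair no longer adds a row; it only
-- refreshes updated_on on the existing one.
INSERT INTO demo_links (base_url_id, org_id) VALUES (1, 2)
ON CONFLICT (base_url_id, org_id) DO UPDATE SET updated_on = CURRENT_TIMESTAMP
RETURNING id, updated_on;
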
M api/api_test.go => api/api_test.go +2 -2
@@ -516,7 +516,7 @@ func TestAPI(t *testing.T) {

		orgLinks, err = models.GetOrgLinks(dbCtx, &database.FilterOptions{})
		c.NoError(err)
-		c.Equal(4, len(orgLinks))
+		c.Equal(3, len(orgLinks))

		tags, err = models.GetTags(dbCtx, &database.FilterOptions{})
		c.NoError(err)


@@ -742,7 +742,7 @@ func TestAPI(t *testing.T) {
		op.Var("slug", "personal-org")
		err := links.Execute(ctx, op, &result)
		c.NoError(err)
-		c.Equal(2, len(result.OrgLinks.Result))
+		c.Equal(1, len(result.OrgLinks.Result))

		op = gqlclient.NewOperation(q)
		op.Var("slug", "business_org")

M cmd/migrations.go => cmd/migrations.go +7 -0
@@ -38,5 +38,12 @@ func GetMigrations() []migrate.Migration {
			0,
			links.MigrateFS,
		),
+		migrate.FSFileMigration(
+			"0003_add_org_links_unique",
+			"migrations/0003_add_org_links_unique.up.sql",
+			"migrations/0003_add_org_links_unique.down.sql",
+			0,
+			links.MigrateFS,
+		),
	}
}

M core/import.go => core/import.go +25 -3
@@ -350,6 +350,24 @@ func importOrgLinks(ctx context.Context, objAdapter *importAdapter, baseURLMap m
	if len(orgLinks) == 0 {
		return nil
	}

+	orgLinks = func() []*models.OrgLink {
+		oMap := make(map[string]bool)
+		nLinks := make([]*models.OrgLink, 0)
+
+		for _, ol := range orgLinks {
+			olId := fmt.Sprintf("%d:%d", ol.BaseURLID.Int64, ol.OrgID)
+			if _, ok := oMap[olId]; ok {
+				// Found a duplicate, continue
+				continue
+			} else {
+				oMap[olId] = true
+				nLinks = append(nLinks, ol)
+			}
+		}
+		return nLinks
+	}()

	err := models.OrgLinkStoreBatch(ctx, orgLinks)
	if err != nil {
		return err


@@ -407,7 +425,10 @@ func ImportFromPinBoard(ctx context.Context, path string,
	billEnabled := links.BillingEnabled(srv.Config)

	for {
-		var pinBoardList []*pinBoardObj
+		var (
+			pinBoardList []*pinBoardObj
+			count        int
+		)
		for dcode.More() {
			var pbObj *pinBoardObj
			err := dcode.Decode(&pbObj)


@@ -416,12 +437,13 @@
				continue
			}
			pinBoardList = append(pinBoardList, pbObj)
-			if len(pinBoardList) == step {
+			count++
+			if count == step {
				break
			}
		}

-		listlen := len(pinBoardList)
+		listlen := count
		if listlen > 0 {
			adapter := &importAdapter{
				elementType: pinBoardType,

A migrations/0003_add_org_links_unique.down.sql => migrations/0003_add_org_links_unique.down.sql +1 -0
@@ -0,0 +1,1 @@
+ALTER TABLE org_links DROP CONSTRAINT unique_base_url_org;

A migrations/0003_add_org_links_unique.up.sql => migrations/0003_add_org_links_unique.up.sql +13 -0
@@ -0,0 +1,13 @@
+-- Necessary for any existing duplicates
+WITH duplicate_cte AS (
+    SELECT id,
+           ROW_NUMBER() OVER (PARTITION BY base_url_id, org_id ORDER BY created_on DESC) AS row_num
+    FROM org_links
+)
+DELETE FROM org_links
+WHERE id IN (
+    SELECT id FROM duplicate_cte WHERE row_num > 1
+);
+
+-- Now create the constraint
+ALTER TABLE org_links ADD CONSTRAINT unique_base_url_org UNIQUE (base_url_id, org_id);
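
For reference, a small throwaway example of what the ROW_NUMBER() pass above selects, assuming a demo_org_links table with fake data rather than the real org_links: the newest row per (base_url_id, org_id) ranks as row_num = 1 and is kept, while older duplicates rank higher and are deleted before the constraint is added.

-- Sketch only: demo data, not this project's org_links table.
CREATE TEMP TABLE demo_org_links (
    id SERIAL PRIMARY KEY,
    base_url_id BIGINT,
    org_id BIGINT,
    created_on TIMESTAMPTZ NOT NULL
);

INSERT INTO demo_org_links (base_url_id, org_id, created_on) VALUES
    (1, 2, '2024-01-01'),  -- older duplicate of (1, 2): row_num = 2, deleted
    (1, 2, '2024-02-01'),  -- newest row for (1, 2): row_num = 1, kept
    (3, 2, '2024-01-15');  -- only row for (3, 2): row_num = 1, kept

SELECT id, base_url_id, org_id,
       ROW_NUMBER() OVER (PARTITION BY base_url_id, org_id ORDER BY created_on DESC) AS row_num
FROM demo_org_links;
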

M migrations/test_migration.up.sql => migrations/test_migration.up.sql +3 -1
@@ -9,11 +9,13 @@ INSERT INTO organizations (owner_id, name, slug, settings) VALUES (2, 'api test 

INSERT INTO base_urls (url, hash) VALUES ('http://base.com', 'abcdefg');

+INSERT INTO base_urls (url, hash) VALUES ('http://base2.com', 'abcdefg2');

INSERT INTO org_links (title, url, base_url_id, user_id, org_id, visibility, hash) VALUES
    ('Public Business url', 'http://base.com?vis=public', 1, 1, 2, 'PUBLIC', 'hash1');

INSERT INTO org_links (title, url, base_url_id, user_id, org_id, visibility, hash) VALUES
-    ('Private Business url', 'http://base.com?vis=private', 1, 1, 2, 'PRIVATE', 'hash2');
+    ('Private Business url', 'http://base2.com?vis=private', 2, 1, 2, 'PRIVATE', 'hash2');

INSERT INTO domains (name, lookup_name, org_id, level, service, status) VALUES ('short domain', 'short.domain.org', 1, 'SYSTEM', 'SHORT', 'APPROVED');
INSERT INTO domains (name, lookup_name, org_id, service, status, level) VALUES ('listing domain', 'list.domain.org', 1, 'LIST', 'APPROVED', 'USER');

M models/org_link.go => models/org_link.go +8 -4
@@ -148,10 +148,12 @@ func (o *OrgLink) Store(ctx context.Context) error {
					"unread", "starred", "archive_url", "type", "hash").
				Values(o.Title, o.URL, o.Description, o.BaseURLID, o.OrgID, o.UserID, o.Visibility,
					o.Unread, o.Starred, o.ArchiveURL, o.Type, o.Hash).
-				Suffix(`RETURNING id, created_on, updated_on`).
+				Suffix(`ON CONFLICT (base_url_id, org_id) DO UPDATE SET 
+					updated_on = CURRENT_TIMESTAMP 
+					RETURNING id, hash, created_on, updated_on`).
				PlaceholderFormat(sq.Dollar).
				RunWith(tx).
-				ScanContext(ctx, &o.ID, &o.CreatedOn, &o.UpdatedOn)
+				ScanContext(ctx, &o.ID, &o.Hash, &o.CreatedOn, &o.UpdatedOn)
		} else {
			err = sq.
				Update("org_links").


@@ -283,7 +285,9 @@ func OrgLinkStoreBatch(ctx context.Context, links []*OrgLink) error {
				link.UserID, link.Visibility, link.Hash, link.Type, link.Unread)
		}
		rows, err := batch.
-			Suffix(`RETURNING id`).
+			Suffix(`ON CONFLICT (base_url_id, org_id) DO UPDATE SET 
+				updated_on = CURRENT_TIMESTAMP 
+				RETURNING id, hash`).
			PlaceholderFormat(sq.Dollar).
			RunWith(tx).
			QueryContext(ctx)


@@ -296,7 +300,7 @@
		// Add ID's to new entries
		for _, link := range links {
			rows.Next()
-			if err = rows.Scan(&link.ID); err != nil {
+			if err = rows.Scan(&link.ID, &link.Hash); err != nil {
				return err
			}
		}
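
Two notes on the batch upsert above, shown with a throwaway demo_links table rather than the real schema. With DO UPDATE, RETURNING yields one row per input tuple, updated rows included, so the Scan loop that follows gets a row back for every link in the batch, and because the hash column is never overwritten, a conflicting save comes back with the existing row's hash. Presumably this is also why core/import.go now dedups the batch first: PostgreSQL rejects an INSERT ... ON CONFLICT DO UPDATE that tries to touch the same row twice in one statement.

-- Sketch only: throwaway table, not the schema from this repository.
CREATE TEMP TABLE demo_links (
    id SERIAL PRIMARY KEY,
    base_url_id BIGINT NOT NULL,
    org_id BIGINT NOT NULL,
    hash TEXT NOT NULL DEFAULT '',
    updated_on TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT demo_unique_base_url_org UNIQUE (base_url_id, org_id)
);

INSERT INTO demo_links (base_url_id, org_id, hash) VALUES (1, 2, 'existing-hash');

-- Two input tuples, two returned rows: (1, 2) conflicts and comes back with
-- its existing hash, (3, 2) inserts and comes back with the new one.
INSERT INTO demo_links (base_url_id, org_id, hash)
VALUES (1, 2, 'new-hash'), (3, 2, 'other-hash')
ON CONFLICT (base_url_id, org_id) DO UPDATE SET updated_on = CURRENT_TIMESTAMP
RETURNING id, hash;
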

M models/schema.sql => models/schema.sql +1 -0
@@ -174,6 +174,7 @@ CREATE TABLE org_links (
  archive_url TEXT DEFAULT '',
  created_on TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
-  updated_on TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
+  updated_on TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+  CONSTRAINT unique_base_url_org UNIQUE (base_url_id, org_id)
);

CREATE INDEX org_links_id_idx ON org_links (id);
